diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000000..68aeed3f77c
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,28 @@
+---
+name: Bug report
+about: Create a report to help us improve
+labels: C-bug
+---
+
+
+
+**Problem**
+
+
+
+**Steps**
+
+1.
+2.
+3.
+
+**Possible Solution(s)**
+
+
+
+**Notes**
+
+Output of `cargo version`:
+
+
+
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000000..68ca4a1b873
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,16 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+labels: C-feature-request
+---
+
+
+
+**Describe the problem you are trying to solve**
+
+
+**Describe the solution you'd like**
+
+
+**Notes**
+
diff --git a/.gitignore b/.gitignore
index f40e923a134..5274e5d529f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
-/target
+target
+Cargo.lock
 .cargo
 /config.stamp
 /Makefile
@@ -6,5 +7,8 @@
 src/doc/build
 src/etc/*.pyc
 src/registry/target
-src/registry/Cargo.lock
 rustc
+__pycache__
+.idea/
+*.iml
+*.swp
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index b40d613a41a..00000000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "src/rust-installer"]
-	path = src/rust-installer
-	url = https://github.com/rust-lang/rust-installer.git
diff --git a/.travis.install.deps.sh b/.travis.install.deps.sh
deleted file mode 100755
index 387cf1dd0cf..00000000000
--- a/.travis.install.deps.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-set -ex
-
-python src/etc/install-deps.py
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 318daa59b44..00000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-language: rust
-rust:
-  - stable
-  - beta
-  - nightly
-sudo: false
-script:
-  - ./configure --prefix=$HOME/cargo-install --disable-cross-tests
-  - make
-  - make test
-  - make distcheck
-  - make doc
-  - make install
-  - make uninstall
-after_success: |
-  [ $TRAVIS_BRANCH = master ] &&
-  [ $TRAVIS_PULL_REQUEST = false ] &&
-  [ $(uname -s) = Linux ] &&
-  pip install ghp-import --user $USER &&
-  $HOME/.local/bin/ghp-import -n target/doc &&
-  git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
-env:
-  global:
-    - secure: scGpeetUfba5RWyuS4yt10bPoFAI9wpHEReIFqEx7eH5vr2Anajk6+70jW6GdrWVdUvdINiArlQ3An2DeB9vEUWcBjw8WvuPtOH0tDMoSsuVloPlFD8yn1Ac0Bx9getAO5ofxqtoNg+OV4MDVuGabEesqAOWqURNrBC7XK+ntC8=
-
-os:
-  - linux
-  - osx
-
-branches:
-  only:
-    - master
-
-addons:
-  apt:
-    sources:
-      - kalakris-cmake
-    packages:
-      - cmake
-      - g++-multilib
-      - lib32stdc++6
diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md
new file mode 100644
index 00000000000..f0acf00ad7e
--- /dev/null
+++ b/ARCHITECTURE.md
@@ -0,0 +1,150 @@
+# Cargo Architecture
+
+This document gives a high-level overview of Cargo internals. You may
+find it useful if you want to contribute to Cargo or if you are
+interested in the inner workings of Cargo.
+
+The purpose of Cargo is to formalize a canonical Rust workflow by automating
+the standard tasks associated with distributing software. Cargo simplifies
+structuring a new project, adding dependencies, writing and running unit tests,
+and more.
+
+
+## Subcommands
+
+Cargo is a single binary composed of a set of [`clap`] subcommands. All
+subcommands live in the `src/bin/cargo/commands` directory.
+`src/bin/cargo/main.rs` is the entry point.
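+
+Besides the built-in commands, Cargo also discovers external subcommands:
+running `cargo foo` invokes the first executable named `cargo-foo` found on
+`PATH`, passing the subcommand name itself as the first argument. A minimal
+sketch of such a subcommand (the `cargo-hello` name is hypothetical, purely
+for illustration):
+
+```rust
+// A binary named `cargo-hello` somewhere on PATH. `cargo hello --loud`
+// executes it as `cargo-hello hello --loud`, so skip the binary name and
+// the repeated subcommand name before reading the real arguments.
+use std::env;
+
+fn main() {
+    let args: Vec<String> = env::args().skip(2).collect();
+    println!("Hello from a custom Cargo subcommand! extra args: {:?}", args);
+}
+```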
+
+Each subcommand, such as [`src/bin/cargo/commands/build.rs`], has its own API
+interface, similar to Git's, parsing command line options, reading the
+configuration files, discovering the Cargo project in the current directory,
+and delegating the actual implementation to one
+of the functions in [`src/cargo/ops/mod.rs`]. This short file is a good
+place to find out about most of the things that Cargo can do.
+Subcommands are designed to pipe to one another, and custom subcommands make
+Cargo easy to extend and attach tools to.
+
+[`clap`]: https://clap.rs/
+[`src/bin/cargo/commands/build.rs`]: src/bin/cargo/commands/build.rs
+[`src/cargo/ops/mod.rs`]: src/cargo/ops/mod.rs
+
+
+## Important Data Structures
+
+There are some important data structures which are used throughout
+Cargo.
+
+[`Config`] is available almost everywhere and holds "global"
+information, such as `CARGO_HOME` or configuration from
+`.cargo/config` files. The [`shell`] method of [`Config`] is the entry
+point for printing status messages and other info to the console.
+
+[`Workspace`] is the description of the workspace for the current
+working directory. Each workspace contains at least one
+[`Package`]. Each package corresponds to a single `Cargo.toml`, and may
+define several [`Target`]s, such as the library, binaries, integration
+tests or examples. Targets are crates (each target defines a crate
+root, like `src/lib.rs` or `examples/foo.rs`) and are what is actually
+compiled by `rustc`.
+
+A typical package defines a single library target and several
+auxiliary ones. Packages are a unit of dependency in Cargo, and when
+package `foo` depends on package `bar`, that means that each target
+from `foo` needs the library target from `bar`.
+
+[`PackageId`] is the unique identifier of a (possibly remote)
+package. It consists of three components: name, version and source
+id. The source is the place where the source code for the package comes
+from. Typical sources are crates.io, a git repository or a folder on
+the local hard drive.
+
+[`Resolve`] is the representation of a directed acyclic graph of package
+dependencies, which uses [`PackageId`]s for nodes. This is the data
+structure that is saved to the lock file. If there is no lock file,
+Cargo constructs a resolve by finding a graph of packages which
+matches the declared dependency specifications according to semver.
+
+[`Config`]: https://docs.rs/cargo/latest/cargo/util/config/struct.Config.html
+[`shell`]: https://docs.rs/cargo/latest/cargo/util/config/struct.Config.html#method.shell
+[`Workspace`]: https://docs.rs/cargo/latest/cargo/core/struct.Workspace.html
+[`Package`]: https://docs.rs/cargo/latest/cargo/core/package/struct.Package.html
+[`Target`]: https://docs.rs/cargo/latest/cargo/core/manifest/struct.Target.html
+[`PackageId`]: https://docs.rs/cargo/latest/cargo/core/package_id/struct.PackageId.html
+[`Resolve`]: https://docs.rs/cargo/latest/cargo/core/struct.Resolve.html
+
+
+## Persistence
+
+Cargo is a non-daemon command line application, which means that all
+the information used by Cargo must be persisted on the hard drive. The
+main sources of information are the `Cargo.toml` and `Cargo.lock` files,
+`.cargo/config` configuration files and the globally shared registry
+of packages downloaded from crates.io, usually located at
+`~/.cargo/registry`. See [`src/cargo/sources/registry`] for the specifics of
+the registry storage format.
+
+[`src/cargo/sources/registry`]: src/cargo/sources/registry
+
+
+## Concurrency
+
+Cargo is mostly single threaded.
+The only concurrency inside a single
+instance of Cargo happens during compilation, when several instances
+of `rustc` are invoked in parallel to build independent
+targets. However, there can be several different instances of the Cargo
+process running concurrently on the system. Cargo guarantees that this
+is always safe by using file locks when accessing potentially shared
+data like the registry or the target directory.
+
+
+## Tests
+
+Cargo has an impressive test suite located in the `tests` folder. Most
+of the tests are integration tests: a project structure with a `Cargo.toml` and
+Rust source code is created in a temporary directory, the `cargo` binary
+is invoked via `std::process::Command` and then stdout and stderr are
+verified against the expected output. To simplify testing, several
+macros of the form `[MACRO]` are used in the expected output. For
+example, `[..]` matches any string.
+
+To see the stdout and stderr streams of the subordinate process, add a `.stream()`
+call to the built-up `Execs`:
+
+```rust
+// Before
+p.cargo("run").run();
+
+// After
+p.cargo("run").stream().run();
+```
+
+Alternatively, to build and run a custom version of cargo, simply run `cargo build`
+and execute `target/debug/cargo`. Note that `+nightly`/`+stable` (and variants),
+being [rustup] features, won't work when executing the locally
+built cargo binary directly; you have to instead build with `cargo +nightly build`
+and run with `rustup run` (e.g. `rustup run nightly
+target/debug/cargo ..`) (or set the `RUSTC` env var to point
+to nightly rustc).
+
+[rustup]: https://rustup.rs/
+
+
+## Logging
+
+Cargo uses [`env_logger`], so you can set the
+`CARGO_LOG` environment variable to get the logs. This is useful both for diagnosing
+bugs in stable Cargo and for local development. Cargo also has internal hierarchical
+profiling infrastructure, which is activated via the `CARGO_PROFILE` variable:
+
+```
+# Outputs all logs with levels debug and higher
+$ CARGO_LOG=debug cargo generate-lockfile
+
+# Don't forget that you can filter by module as well
+$ CARGO_LOG=cargo::core::resolver=trace cargo generate-lockfile
+
+# Output first three levels of profiling info
+$ CARGO_PROFILE=3 cargo generate-lockfile
+```
+
+[`env_logger`]: https://docs.rs/env_logger/*/env_logger/
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000000..c3c1a7f08ca
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,625 @@
+# Changelog
+
+## Cargo 1.38 (2019-09-26)
+[4c1fa54d...HEAD](https://github.com/rust-lang/cargo/compare/4c1fa54d...HEAD)
+
+### Added
+
+### Changed
+
+### Fixed
+- (Nightly only): Fixed exponential blowup when using `CARGO_BUILD_PIPELINING`.
+  [#7062](https://github.com/rust-lang/cargo/pull/7062)
+- Fixed using the wrong directory when updating git repositories when using
+  the `git-fetch-with-cli` config option, and the `GIT_DIR` environment
+  variable is set. This may happen when running cargo from git callbacks.
+  [#7082](https://github.com/rust-lang/cargo/pull/7082)
+
+## Cargo 1.37 (2019-08-15)
+[c4fcfb72...4c1fa54d](https://github.com/rust-lang/cargo/compare/c4fcfb72...4c1fa54d)
+
+### Added
+- Added `doctest` field to `cargo metadata` to determine if a target's
+  documentation is tested.
+  [#6953](https://github.com/rust-lang/cargo/pull/6953)
+  [#6965](https://github.com/rust-lang/cargo/pull/6965)
+- (Nightly only): Added [compiler message
+  caching](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#cache-messages).
+  The `-Z cache-messages` flag makes cargo cache the compiler output so that
+  future runs can redisplay previous warnings.
+  [#6933](https://github.com/rust-lang/cargo/pull/6933)
+- 🔥 The [`cargo
+  vendor`](https://doc.rust-lang.org/nightly/cargo/commands/cargo-vendor.html)
+  command is now built into Cargo. This command may be used to create a local
+  copy of the sources of all dependencies.
+  [#6869](https://github.com/rust-lang/cargo/pull/6869)
+- 🔥 The "publish lockfile" feature is now stable. This feature will
+  automatically include the `Cargo.lock` file when a package is published if
+  it contains a binary executable target. By default, Cargo will ignore
+  `Cargo.lock` when installing a package. To force Cargo to use the
+  `Cargo.lock` file included in the published package, use `cargo install
+  --locked`. This may be useful to ensure that `cargo install` consistently
+  reproduces the same result. It may also be useful when a semver-incompatible
+  change is accidentally published to a dependency, providing a way to fall
+  back to a version that is known to work.
+  [#7026](https://github.com/rust-lang/cargo/pull/7026)
+- 🔥 The `default-run` feature has been stabilized. This feature allows you to
+  specify which binary executable to run by default with `cargo run` when a
+  package includes multiple binaries. Set the `default-run` key in the
+  `[package]` table in `Cargo.toml` to the name of the binary to use by
+  default.
+  [#7056](https://github.com/rust-lang/cargo/pull/7056)
+
+### Changed
+- `cargo package` now verifies that build scripts do not create empty
+  directories.
+  [#6973](https://github.com/rust-lang/cargo/pull/6973)
+- A warning is now issued if `cargo doc` generates duplicate outputs, which
+  causes files to be randomly stomped on. This may happen for a variety of
+  reasons (renamed dependencies, multiple versions of the same package,
+  packages with renamed libraries, etc.). This is a known bug, which needs
+  more work to handle correctly.
+  [#6998](https://github.com/rust-lang/cargo/pull/6998)
+- Enabling a dependency's feature with `--features foo/bar` will no longer
+  compile the current crate with the `foo` feature if `foo` is not an optional
+  dependency.
+  [#7010](https://github.com/rust-lang/cargo/pull/7010)
+- If `--remap-path-prefix` is passed via `RUSTFLAGS`, it will no longer affect
+  the filename metadata hash.
+  [#6966](https://github.com/rust-lang/cargo/pull/6966)
+- libgit2, which Cargo uses to access git repositories, has been updated to
+  0.28.2. This brings in hundreds of changes and fixes since it was last
+  updated in November.
+  [#7018](https://github.com/rust-lang/cargo/pull/7018)
+- Cargo now supports absolute paths in the dep-info files generated by rustc.
+  This is laying the groundwork for [tracking
+  binaries](https://github.com/rust-lang/rust/pull/61727), such as libstd, for
+  rebuild detection. (Note: this contains a known bug.)
+  [#7030](https://github.com/rust-lang/cargo/pull/7030)
+- (Nightly only): `-Z mtime-on-use` no longer touches intermediate artifacts.
+  [#7050](https://github.com/rust-lang/cargo/pull/7050)
+
+### Fixed
+- Fixed how zsh completions fetch the list of commands.
+  [#6956](https://github.com/rust-lang/cargo/pull/6956)
+- "+ debuginfo" is no longer printed in the build summary when `debug` is set
+  to 0.
+  [#6971](https://github.com/rust-lang/cargo/pull/6971)
+- Fixed `cargo doc` with an example configured with `doc = true` to document
+  correctly.
+  [#7023](https://github.com/rust-lang/cargo/pull/7023)
+
+## Cargo 1.36 (2019-07-04)
+[6f3e9c36...c4fcfb72](https://github.com/rust-lang/cargo/compare/6f3e9c36...c4fcfb72)
+
+### Added
+- (Nightly only): Added [`-Z install-upgrade`
+  feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#install-upgrade)
+  to track details about installed crates and to update them if they are
+  out-of-date. [#6798](https://github.com/rust-lang/cargo/pull/6798)
+- (Nightly only): Added the [`public-dependency`
+  feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#public-dependency)
+  which allows tracking public versus private dependencies.
+  [#6772](https://github.com/rust-lang/cargo/pull/6772)
+- Added more detailed documentation on target auto-discovery.
+  [#6898](https://github.com/rust-lang/cargo/pull/6898)
+- (Nightly only): Added build pipelining via the `build.pipelining` config
+  option (`CARGO_BUILD_PIPELINING` env var).
+  [#6883](https://github.com/rust-lang/cargo/pull/6883)
+- 🔥 Stabilized the `--offline` flag which allows using cargo without a network
+  connection.
+  [#6934](https://github.com/rust-lang/cargo/pull/6934)
+  [#6871](https://github.com/rust-lang/cargo/pull/6871)
+
+### Changed
+- `publish = ["crates-io"]` may be added to the manifest to restrict
+  publishing to crates.io only.
+  [#6838](https://github.com/rust-lang/cargo/pull/6838)
+- macOS: Only include the default paths if `DYLD_FALLBACK_LIBRARY_PATH` is not
+  set. Also, remove `/lib` from the default set.
+  [#6856](https://github.com/rust-lang/cargo/pull/6856)
+- `cargo publish` will now exit early if the login token is not available.
+  [#6854](https://github.com/rust-lang/cargo/pull/6854)
+- HTTP/2 stream errors are now considered "spurious" and will cause a retry.
+  [#6861](https://github.com/rust-lang/cargo/pull/6861)
+- (Nightly only): The `publish-lockfile` feature has had some significant
+  changes. The default is now `true`: the `Cargo.lock` will always be
+  published for binary crates. The `Cargo.lock` is now regenerated during
+  publishing. `cargo install` now ignores the `Cargo.lock` file by default,
+  and requires `--locked` to use the lock file. Warnings have been added if
+  yanked dependencies are detected.
+  [#6840](https://github.com/rust-lang/cargo/pull/6840)
+- Setting a feature on a dependency where that feature points to a *required*
+  dependency is now an error. Previously it was a warning.
+  [#6860](https://github.com/rust-lang/cargo/pull/6860)
+- The `registry.index` config value now supports relative `file:` URLs.
+  [#6873](https://github.com/rust-lang/cargo/pull/6873)
+- macOS: The `.dSYM` directory is now symbolically linked next to example
+  binaries without the metadata hash so that debuggers can find it.
+  [#6891](https://github.com/rust-lang/cargo/pull/6891)
+- The default `Cargo.toml` template for new projects now includes a comment
+  providing a link to the documentation.
+  [#6881](https://github.com/rust-lang/cargo/pull/6881)
+- Some improvements to the wording of the crate download summary.
+  [#6916](https://github.com/rust-lang/cargo/pull/6916)
+  [#6920](https://github.com/rust-lang/cargo/pull/6920)
+- ✨ Changed the `RUST_LOG` environment variable to `CARGO_LOG` so that user code
+  that uses the `log` crate will not display cargo's debug output.
+  [#6918](https://github.com/rust-lang/cargo/pull/6918)
+- `Cargo.toml` is now always included when packaging, even if it is not listed
+  in `package.include`.
+  [#6925](https://github.com/rust-lang/cargo/pull/6925)
+- Package include/exclude values now use gitignore patterns instead of glob
+  patterns. [#6924](https://github.com/rust-lang/cargo/pull/6924)
+- Provide a better error message when crates.io times out. Also improve error
+  messages with other HTTP response codes.
+  [#6936](https://github.com/rust-lang/cargo/pull/6936)
+
+### Performance
+- Resolver performance improvements for some cases.
+  [#6853](https://github.com/rust-lang/cargo/pull/6853)
+- Optimized how cargo reads the index JSON files by caching the results.
+  [#6880](https://github.com/rust-lang/cargo/pull/6880)
+  [#6912](https://github.com/rust-lang/cargo/pull/6912)
+  [#6940](https://github.com/rust-lang/cargo/pull/6940)
+- Various performance improvements.
+  [#6867](https://github.com/rust-lang/cargo/pull/6867)
+
+### Fixed
+- More carefully track the on-disk fingerprint information for dependencies.
+  This can help in some rare cases where the build is interrupted and
+  restarted. [#6832](https://github.com/rust-lang/cargo/pull/6832)
+- `cargo run` now correctly passes non-UTF8 arguments to the child process.
+  [#6849](https://github.com/rust-lang/cargo/pull/6849)
+- Fixed bash completion to run on bash 3.2, the stock version on macOS.
+  [#6905](https://github.com/rust-lang/cargo/pull/6905)
+- Various fixes and improvements to zsh completion.
+  [#6926](https://github.com/rust-lang/cargo/pull/6926)
+  [#6929](https://github.com/rust-lang/cargo/pull/6929)
+- Fixed `cargo update` ignoring `-p` arguments if the `Cargo.lock` file was
+  missing.
+  [#6904](https://github.com/rust-lang/cargo/pull/6904)
+
+## Cargo 1.35 (2019-05-23)
+[6789d8a0...6f3e9c36](https://github.com/rust-lang/cargo/compare/6789d8a0...6f3e9c36)
+
+### Added
+- Added the `rustc-cdylib-link-arg` key for build scripts to specify linker
+  arguments for cdylib crates.
+  [#6298](https://github.com/rust-lang/cargo/pull/6298)
+- (Nightly only): `cargo clippy-preview` is now a built-in cargo command.
+  [#6759](https://github.com/rust-lang/cargo/pull/6759)
+
+### Changed
+- When passing a test filter, such as `cargo test foo`, don't build examples
+  (unless they set `test = true`).
+  [#6683](https://github.com/rust-lang/cargo/pull/6683)
+- Forward the `--quiet` flag from `cargo test` to the libtest harness so that
+  tests are actually quiet.
+  [#6358](https://github.com/rust-lang/cargo/pull/6358)
+- The verification step in `cargo package` that checks if any files are
+  modified is now stricter. It uses a hash of the contents instead of checking
+  filesystem mtimes. It also checks *all* files in the package.
+  [#6740](https://github.com/rust-lang/cargo/pull/6740)
+- Jobserver tokens are now released whenever Cargo blocks on a file lock.
+  [#6748](https://github.com/rust-lang/cargo/pull/6748)
+- Issue a warning for a previous bug in the TOML parser that allowed multiple
+  table headers with the same name.
+  [#6761](https://github.com/rust-lang/cargo/pull/6761)
+- Removed the `CARGO_PKG_*` environment variables from the metadata hash and
+  added them to the fingerprint instead. This means that when these values
+  change, stale artifacts are not left behind. Also added the "repository"
+  value to the fingerprint.
+  [#6785](https://github.com/rust-lang/cargo/pull/6785)
+- `cargo metadata` no longer shows a `null` field for a dependency without a
+  library in `resolve.nodes.deps`. The dependency is now omitted instead.
+  [#6534](https://github.com/rust-lang/cargo/pull/6534)
+- `cargo new` will no longer include an email address in the `authors` field
+  if it is set to the empty string.
+  [#6802](https://github.com/rust-lang/cargo/pull/6802)
+- `cargo doc --open` now works when documenting multiple packages.
+  [#6803](https://github.com/rust-lang/cargo/pull/6803)
+- `cargo install --path P` now loads the `.cargo/config` file from the
+  directory P. [#6805](https://github.com/rust-lang/cargo/pull/6805)
+- Using semver metadata in a version requirement (such as `1.0.0+1234`) now
+  issues a warning that it is ignored.
+  [#6806](https://github.com/rust-lang/cargo/pull/6806)
+- `cargo install` now rejects certain combinations of flags where some flags
+  would have been ignored.
+  [#6801](https://github.com/rust-lang/cargo/pull/6801)
+- (Nightly only): The `build-override` profile setting now includes
+  proc-macros and their dependencies.
+  [#6811](https://github.com/rust-lang/cargo/pull/6811)
+- Resolver performance improvements for some cases.
+  [#6776](https://github.com/rust-lang/cargo/pull/6776)
+- (Nightly only): Optional and target dependencies now work better with `-Z
+  offline`. [#6814](https://github.com/rust-lang/cargo/pull/6814)
+
+### Fixed
+- Fixed running separate commands (such as `cargo build` then `cargo test`)
+  where the second command could use stale results from a build script.
+  [#6720](https://github.com/rust-lang/cargo/pull/6720)
+- Fixed `cargo fix` not working properly if a `.gitignore` file matched the
+  root package directory.
+  [#6767](https://github.com/rust-lang/cargo/pull/6767)
+- Fixed accidentally compiling a lib multiple times if `panic=unwind` was set
+  in a profile. [#6781](https://github.com/rust-lang/cargo/pull/6781)
+- Paths to JSON files in the `build.target` config value are now canonicalized
+  to fix building dependencies.
+  [#6778](https://github.com/rust-lang/cargo/pull/6778)
+- Fixed re-running a build script if its compilation was interrupted (such as
+  if it is killed). [#6782](https://github.com/rust-lang/cargo/pull/6782)
+- Fixed `cargo new` initializing a fossil repo.
+  [#6792](https://github.com/rust-lang/cargo/pull/6792)
+- Fixed updating a git repo that has had a force push when using the
+  `git-fetch-with-cli` feature. `git-fetch-with-cli` also shows more error
+  information now when it fails.
+  [#6800](https://github.com/rust-lang/cargo/pull/6800)
+- `--example` binaries built for the WASM target are fixed to no longer
+  include a metadata hash in the filename, and are correctly emitted in the
+  `compiler-artifact` JSON message.
+  [#6812](https://github.com/rust-lang/cargo/pull/6812)
+
+## Cargo 1.34 (2019-04-11)
+[f099fe94...6789d8a0](https://github.com/rust-lang/cargo/compare/f099fe94...6789d8a0)
+
+### Added
+- 🔥 Stabilized support for [alternate
+  registries](https://doc.rust-lang.org/1.34.0/cargo/reference/registries.html).
+  [#6654](https://github.com/rust-lang/cargo/pull/6654)
+- (Nightly only): Added `-Z mtime-on-use` flag to cause the mtime to be
+  updated on the filesystem when a crate is used. This is intended to be able
+  to track stale artifacts in the future for cleaning up unused files.
+  [#6477](https://github.com/rust-lang/cargo/pull/6477)
+  [#6573](https://github.com/rust-lang/cargo/pull/6573)
+- Added documentation on using builds.sr.ht Continuous Integration with Cargo.
+  [#6565](https://github.com/rust-lang/cargo/pull/6565)
+- `Cargo.lock` now includes a comment at the top noting that it is `@generated`.
+  [#6548](https://github.com/rust-lang/cargo/pull/6548)
+- Azure DevOps badges are now supported.
+  [#6264](https://github.com/rust-lang/cargo/pull/6264)
+- (Nightly only): Added experimental `-Z dual-proc-macros` to build proc
+  macros for both the host and the target.
+  [#6547](https://github.com/rust-lang/cargo/pull/6547)
+- Added a warning if the `--exclude` flag specifies an unknown package.
+  [#6679](https://github.com/rust-lang/cargo/pull/6679)
+
+### Changed
+- `cargo test --doc --no-run` doesn't do anything, so it now displays an error
+  to that effect. [#6628](https://github.com/rust-lang/cargo/pull/6628)
+- Various updates to bash completion: add missing options and commands,
+  support libtest completions, use rustup for `--target` completion, fall back
+  to filename completion, fix editing the command line.
+  [#6644](https://github.com/rust-lang/cargo/pull/6644)
+- Publishing a crate with a `[patch]` section no longer generates an error.
+  The `[patch]` section is removed from the manifest before publishing.
+  [#6535](https://github.com/rust-lang/cargo/pull/6535)
+- The `build.incremental = true` config value is now treated the same as
+  `CARGO_INCREMENTAL=1`; previously it was ignored.
+  [#6688](https://github.com/rust-lang/cargo/pull/6688)
+- Errors from a registry are now always displayed regardless of the HTTP
+  response code. [#6771](https://github.com/rust-lang/cargo/pull/6771)
+
+### Fixed
+- Fixed bash completion for `cargo run --example`.
+  [#6578](https://github.com/rust-lang/cargo/pull/6578)
+- Fixed a race condition when using a *local* registry and running multiple
+  cargo commands at the same time that build the same crate.
+  [#6591](https://github.com/rust-lang/cargo/pull/6591)
+- Fixed some flickering and excessive updates of the progress bar.
+  [#6615](https://github.com/rust-lang/cargo/pull/6615)
+- Fixed a hang when using a git credential helper that returns incorrect
+  credentials. [#6681](https://github.com/rust-lang/cargo/pull/6681)
+- Fixed resolving yanked crates with a local registry.
+  [#6750](https://github.com/rust-lang/cargo/pull/6750)
+
+## Cargo 1.33 (2019-02-28)
+[8610973a...f099fe94](https://github.com/rust-lang/cargo/compare/8610973a...f099fe94)
+
+### Added
+- `compiler-artifact` JSON messages now include an `"executable"` key which
+  includes the path to the executable that was built.
+  [#6363](https://github.com/rust-lang/cargo/pull/6363)
+- The man pages have been rewritten, and are now published with the web
+  documentation. [#6405](https://github.com/rust-lang/cargo/pull/6405)
+- (Nightly only): Allow using registry *names* in `[patch]` tables instead of
+  just URLs. [#6456](https://github.com/rust-lang/cargo/pull/6456)
+- `cargo login` now displays a confirmation after saving the token.
+  [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- A warning is now emitted if a `[patch]` entry does not match any package.
+  [#6470](https://github.com/rust-lang/cargo/pull/6470)
+- `cargo metadata` now includes the `links` key for a package.
+  [#6480](https://github.com/rust-lang/cargo/pull/6480)
+- (Nightly only): `cargo metadata` added the `registry` key for dependencies.
+  [#6500](https://github.com/rust-lang/cargo/pull/6500)
+- "Very verbose" output with `-vv` now displays the environment variables that
+  cargo sets when it runs a process.
+  [#6492](https://github.com/rust-lang/cargo/pull/6492)
+- `--example`, `--bin`, `--bench`, or `--test` without an argument now lists
+  the available targets for those options.
+  [#6505](https://github.com/rust-lang/cargo/pull/6505)
+- Windows: If a process fails with an extended status exit code, a
+  human-readable name for the code is now displayed.
+  [#6532](https://github.com/rust-lang/cargo/pull/6532)
+- Added `--features`, `--no-default-features`, and `--all-features` flags to
+  the `cargo package` and `cargo publish` commands to use the given features
+  when verifying the package.
+  [#6453](https://github.com/rust-lang/cargo/pull/6453)
+
+### Changed
+- If `cargo fix` fails to compile the fixed code, the rustc errors are now
+  displayed on the console.
+  [#6419](https://github.com/rust-lang/cargo/pull/6419)
+- (Nightly only): Registry names are now restricted to the same style as
+  package names (alphanumeric, `-` and `_` characters).
+  [#6469](https://github.com/rust-lang/cargo/pull/6469)
+- (Nightly only): `cargo login` now displays the `/me` URL from the registry
+  config. [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- Hide the `--host` flag from `cargo login`; it is unused.
+  [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- (Nightly only): `cargo login --registry=NAME` now supports interactive input
+  for the token. [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- (Nightly only): Registries may now elide the `api` key from `config.json` to
+  indicate they do not support API access.
+  [#6466](https://github.com/rust-lang/cargo/pull/6466)
+- Build script fingerprints now include the rustc version.
+  [#6473](https://github.com/rust-lang/cargo/pull/6473)
+- macOS: Switched to setting `DYLD_FALLBACK_LIBRARY_PATH` instead of
+  `DYLD_LIBRARY_PATH`. [#6355](https://github.com/rust-lang/cargo/pull/6355)
+- `RUSTFLAGS` is now included in the metadata hash, meaning that changing
+  the flags will not overwrite previously built files.
+  [#6503](https://github.com/rust-lang/cargo/pull/6503)
+- When updating the crate graph, unrelated yanked crates were erroneously
+  removed. They are now kept at their original version if possible. This was
+  causing unrelated packages to be downgraded during `cargo update -p
+  somecrate`. [#5702](https://github.com/rust-lang/cargo/issues/5702)
+- TOML files now support the [0.5 TOML
+  syntax](https://github.com/toml-lang/toml/blob/master/CHANGELOG.md#050--2018-07-11).
+
+### Fixed
+- `cargo fix` will now ignore suggestions that modify multiple files.
+  [#6402](https://github.com/rust-lang/cargo/pull/6402)
+- `cargo fix` will now only fix one target at a time, to deal with targets
+  which share the same source files.
+  [#6434](https://github.com/rust-lang/cargo/pull/6434)
+- (Nightly only): Fixed panic when using `--message-format=json` with metabuild.
+  [#6432](https://github.com/rust-lang/cargo/pull/6432)
+- Fixed bash completion showing the list of cargo commands.
+  [#6461](https://github.com/rust-lang/cargo/issues/6461)
+- `cargo init` will now avoid creating duplicate entries in `.gitignore`
+  files. [#6521](https://github.com/rust-lang/cargo/pull/6521)
+- (Nightly only): Fixed detection of publishing to crates.io when using
+  alternate registries. [#6525](https://github.com/rust-lang/cargo/pull/6525)
+- Builds now attempt to detect if a file is modified in the middle of a
+  compilation, allowing you to build again and pick up the new changes. This
+  is done by keeping track of when the compilation *starts*, not when it
+  finishes. Also, [#5919](https://github.com/rust-lang/cargo/pull/5919) was
+  reverted, meaning that cargo does *not* treat equal filesystem mtimes as
+  requiring a rebuild.
+  [#6484](https://github.com/rust-lang/cargo/pull/6484)
+
+## Cargo 1.32 (2019-01-17)
+[339d9f9c...8610973a](https://github.com/rust-lang/cargo/compare/339d9f9c...8610973a)
+
+### Added
+- (Nightly only): Allow usernames in registry URLs.
+  [#6242](https://github.com/rust-lang/cargo/pull/6242)
+- Registries may now display warnings after a successful publish.
+  [#6303](https://github.com/rust-lang/cargo/pull/6303)
+- Added a [glossary](https://doc.rust-lang.org/cargo/appendix/glossary.html)
+  to the documentation. [#6321](https://github.com/rust-lang/cargo/pull/6321)
+- Added the alias `c` for `cargo check`.
+  [#6218](https://github.com/rust-lang/cargo/pull/6218)
+- (Nightly only): Added `"compile_mode"` key to the build-plan JSON structure
+  to be able to distinguish running a custom build script versus compiling the
+  build script. [#6331](https://github.com/rust-lang/cargo/pull/6331)
+
+### Changed
+- 🔥 HTTP/2 multiplexing is now enabled by default. The `http.multiplexing`
+  config value may be used to disable it.
+  [#6271](https://github.com/rust-lang/cargo/pull/6271)
+- Use ANSI escape sequences to clear lines instead of spaces.
+  [#6233](https://github.com/rust-lang/cargo/pull/6233)
+- Disable git templates when checking out git dependencies, since they can
+  cause problems. [#6252](https://github.com/rust-lang/cargo/pull/6252)
+- Include the `--update-head-ok` git flag when using the
+  `net.git-fetch-with-cli` option. This can help prevent failures when
+  fetching some repositories.
+  [#6250](https://github.com/rust-lang/cargo/pull/6250)
+- When extracting a crate during the verification step of `cargo package`, the
+  filesystem mtimes are no longer set, which was failing on some rare
+  filesystems. [#6257](https://github.com/rust-lang/cargo/pull/6257)
+- `crate-type = ["proc-macro"]` is now treated the same as `proc-macro = true`
+  in `Cargo.toml`. [#6256](https://github.com/rust-lang/cargo/pull/6256)
+- An error is raised if `dependencies`, `features`, `target`, or `badges` is
+  set in a virtual workspace. Warnings are displayed if `replace` or `patch`
+  is used in a workspace member.
+  [#6276](https://github.com/rust-lang/cargo/pull/6276)
+- Improved performance of the resolver in some cases.
+  [#6283](https://github.com/rust-lang/cargo/pull/6283)
+  [#6366](https://github.com/rust-lang/cargo/pull/6366)
+- `.rmeta` files are no longer hard-linked into the base target directory
+  (`target/debug`). [#6292](https://github.com/rust-lang/cargo/pull/6292)
+- A warning is issued if multiple targets are built with the same output
+  filenames. [#6308](https://github.com/rust-lang/cargo/pull/6308)
+- When using `cargo build` (without `--release`), benchmarks are now built
+  using the "test" profile instead of "bench". This makes it easier to debug
+  benchmarks, and avoids confusing behavior.
+  [#6309](https://github.com/rust-lang/cargo/pull/6309)
+- User aliases may now override built-in aliases (`b`, `r`, `t`, and `c`).
+  [#6259](https://github.com/rust-lang/cargo/pull/6259)
+- Setting `autobins=false` now disables auto-discovery of inferred targets.
+  [#6329](https://github.com/rust-lang/cargo/pull/6329)
+- `cargo verify-project` will now fail on stable if the project uses unstable
+  features. [#6326](https://github.com/rust-lang/cargo/pull/6326)
+- Platform targets with an internal `.` within the name are now allowed.
+  [#6255](https://github.com/rust-lang/cargo/pull/6255)
+- `cargo clean --release` now only deletes the release directory.
+  [#6349](https://github.com/rust-lang/cargo/pull/6349)
+
+### Fixed
+- Avoid adding extra angle brackets in email address for `cargo new`.
+  [#6243](https://github.com/rust-lang/cargo/pull/6243)
+- The progress bar is disabled if the `CI` environment variable is set.
+  [#6281](https://github.com/rust-lang/cargo/pull/6281)
+- Avoid retaining all rustc output in memory.
+  [#6289](https://github.com/rust-lang/cargo/pull/6289)
+- If JSON parsing fails, and rustc exits nonzero, don't lose the parse failure
+  message. [#6290](https://github.com/rust-lang/cargo/pull/6290)
+- (Nightly only): `--out-dir` no longer copies over build scripts.
+  [#6300](https://github.com/rust-lang/cargo/pull/6300)
+- Fixed renaming a project directory with build scripts.
+  [#6328](https://github.com/rust-lang/cargo/pull/6328)
+- Fixed `cargo run --example NAME` to work correctly if the example sets
+  `crate_type = ["bin"]`.
+  [#6330](https://github.com/rust-lang/cargo/pull/6330)
+- Fixed issue with `cargo package` git discovery being too aggressive. The
+  `--allow-dirty` flag now completely disables the git repo checks.
+  [#6280](https://github.com/rust-lang/cargo/pull/6280)
+- Fixed build change tracking for `[patch]` deps which resulted in `cargo
+  build` rebuilding when it shouldn't.
+  [#6493](https://github.com/rust-lang/cargo/pull/6493)
+
+## Cargo 1.31 (2018-12-06)
+[36d96825...339d9f9c](https://github.com/rust-lang/cargo/compare/36d96825...339d9f9c)
+
+### Added
+- 🔥 Stabilized support for the 2018 edition.
+  [#5984](https://github.com/rust-lang/cargo/pull/5984)
+  [#5989](https://github.com/rust-lang/cargo/pull/5989)
+- 🔥 Added the ability to [rename
+  dependencies](https://doc.rust-lang.org/1.31.0/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml)
+  in Cargo.toml. [#6319](https://github.com/rust-lang/cargo/pull/6319)
+- 🔥 Added support for HTTP/2 pipelining and multiplexing. Set the
+  `http.multiplexing` config value to enable.
+  [#6005](https://github.com/rust-lang/cargo/pull/6005)
+- (Nightly only): Added `--registry` flag to `cargo install`.
+  [#6128](https://github.com/rust-lang/cargo/pull/6128)
+- (Nightly only): Added `registry.default` configuration value to specify the
+  default registry to use if the `--registry` flag is not passed.
+  [#6135](https://github.com/rust-lang/cargo/pull/6135)
+- (Nightly only): Added `--registry` flag to `cargo new` and `cargo init`.
+  [#6135](https://github.com/rust-lang/cargo/pull/6135)
+- Added `http.debug` configuration value to debug HTTP connections. Use
+  `CARGO_HTTP_DEBUG=true RUST_LOG=cargo::ops::registry cargo build` to display
+  the debug information. [#6166](https://github.com/rust-lang/cargo/pull/6166)
+- The `CARGO_PKG_REPOSITORY` environment variable is set with the repository
+  value from `Cargo.toml` when building.
+  [#6096](https://github.com/rust-lang/cargo/pull/6096)
+
+### Changed
+- `cargo test --doc` now rejects other flags instead of ignoring them.
+  [#6037](https://github.com/rust-lang/cargo/pull/6037)
+- `cargo install` ignores `~/.cargo/config`.
+  [#6026](https://github.com/rust-lang/cargo/pull/6026)
+- `cargo version --verbose` is now the same as `cargo -vV`.
+  [#6076](https://github.com/rust-lang/cargo/pull/6076)
+- Comments at the top of `Cargo.lock` are now preserved.
+  [#6181](https://github.com/rust-lang/cargo/pull/6181)
+- When building in "very verbose" mode (`cargo build -vv`), build script
+  output is prefixed with the package name and version, such as `[foo 0.0.1]`.
+  [#6164](https://github.com/rust-lang/cargo/pull/6164)
+- If `cargo fix --broken-code` fails to compile after fixes have been applied,
+  the files are no longer reverted and are left in their broken state.
+  [#6316](https://github.com/rust-lang/cargo/pull/6316)
+
+### Fixed
+- Windows: Pass Ctrl-C to the process with `cargo run`.
+  [#6004](https://github.com/rust-lang/cargo/pull/6004)
+- macOS: Fix bash completion.
+  [#6038](https://github.com/rust-lang/cargo/pull/6038)
+- Support arbitrary toolchain names when completing `+toolchain` in bash
+  completion. [#6038](https://github.com/rust-lang/cargo/pull/6038)
+- Fixed edge cases in the resolver, when backtracking on failed dependencies.
+  [#5988](https://github.com/rust-lang/cargo/pull/5988)
+- Fixed `cargo test --all-targets` running lib tests three times.
+  [#6039](https://github.com/rust-lang/cargo/pull/6039)
+- Fixed publishing renamed dependencies to crates.io.
+  [#5993](https://github.com/rust-lang/cargo/pull/5993)
+- Fixed `cargo install` on a git repo with multiple binaries.
+  [#6060](https://github.com/rust-lang/cargo/pull/6060)
+- Fixed deeply nested JSON emitted by rustc being lost.
+  [#6081](https://github.com/rust-lang/cargo/pull/6081)
+- Windows: Fix locking msys terminals to 60 characters.
+  [#6122](https://github.com/rust-lang/cargo/pull/6122)
+- Fixed renamed dependencies with dashes.
+  [#6140](https://github.com/rust-lang/cargo/pull/6140)
+- Fixed linking against the wrong dylib when the dylib existed in both
+  `target/debug` and `target/debug/deps`.
+  [#6167](https://github.com/rust-lang/cargo/pull/6167)
+- Fixed some unnecessary recompiles when `panic=abort` is used.
+  [#6170](https://github.com/rust-lang/cargo/pull/6170)
+
+## Cargo 1.30 (2018-10-25)
+[524a578d...36d96825](https://github.com/rust-lang/cargo/compare/524a578d...36d96825)
+
+### Added
+- 🔥 Added an animated progress bar that shows progress during building.
+  [#5995](https://github.com/rust-lang/cargo/pull/5995/)
+- Added `resolve.nodes.deps` key to `cargo metadata`, which includes more
+  information about resolved dependencies, and properly handles renamed
+  dependencies. [#5871](https://github.com/rust-lang/cargo/pull/5871)
+- When creating a package, provide more detail with `-v` when failing to
+  discover if files are dirty in a git repository. Also fix a problem with
+  discovery on Windows. [#5858](https://github.com/rust-lang/cargo/pull/5858)
+- Filters like `--bin`, `--test`, `--example`, `--bench`, or `--lib` can be
+  used in a workspace without selecting a specific package.
+  [#5873](https://github.com/rust-lang/cargo/pull/5873)
+- `cargo run` can be used in a workspace without selecting a specific package.
+  [#5877](https://github.com/rust-lang/cargo/pull/5877)
+- `cargo doc --message-format=json` now outputs JSON messages from rustdoc.
+  [#5878](https://github.com/rust-lang/cargo/pull/5878)
+- Added `--message-format=short` to show one-line messages.
+  [#5879](https://github.com/rust-lang/cargo/pull/5879)
+- Added `.cargo_vcs_info.json` file to `.crate` packages that captures the
+  current git hash. [#5886](https://github.com/rust-lang/cargo/pull/5886)
+- Added `net.git-fetch-with-cli` configuration option to use the `git`
+  executable to fetch repositories instead of using the built-in libgit2
+  library. [#5914](https://github.com/rust-lang/cargo/pull/5914)
+- Added `required-features` to `cargo metadata`.
+  [#5902](https://github.com/rust-lang/cargo/pull/5902)
+- `cargo uninstall` within a package will now uninstall that package.
+  [#5927](https://github.com/rust-lang/cargo/pull/5927)
+- (Nightly only): Added
+  [metabuild](https://doc.rust-lang.org/1.30.0/cargo/reference/unstable.html#metabuild).
+  [#5628](https://github.com/rust-lang/cargo/pull/5628)
+- Added `--allow-staged` flag to `cargo fix` to allow it to run if files are
+  staged in git. [#5943](https://github.com/rust-lang/cargo/pull/5943)
+- Added `net.low-speed-limit` config value, and also honor `net.timeout` for
+  http operations. [#5957](https://github.com/rust-lang/cargo/pull/5957)
+- Added `--edition` flag to `cargo new`.
+  [#5984](https://github.com/rust-lang/cargo/pull/5984)
+- Temporarily stabilized 2018 edition support for the duration of the beta.
+  [#5984](https://github.com/rust-lang/cargo/pull/5984)
+  [#5989](https://github.com/rust-lang/cargo/pull/5989)
+- Added support for the `target.'cfg(…)'.runner` config value to specify the
+  run/test/bench runner for targets selected by a `cfg` expression.
+  [#5959](https://github.com/rust-lang/cargo/pull/5959)
+
+### Changed
+- Windows: `cargo run` will not kill child processes when the main process
+  exits. [#5887](https://github.com/rust-lang/cargo/pull/5887)
+- Switched to the `opener` crate to open a web browser with `cargo doc
+  --open`. This should more reliably select the system-preferred browser on
+  all platforms. [#5888](https://github.com/rust-lang/cargo/pull/5888)
+- Equal file mtimes now cause a target to be rebuilt. Previously only if files
+  were strictly *newer* than the last build would it cause a rebuild.
+  [#5919](https://github.com/rust-lang/cargo/pull/5919)
+- Ignore `build.target` config value when running `cargo install`.
+  [#5874](https://github.com/rust-lang/cargo/pull/5874)
+- Ignore `RUSTC_WRAPPER` for `cargo fix`.
+  [#5983](https://github.com/rust-lang/cargo/pull/5983)
+- Ignore empty `RUSTC_WRAPPER`.
+  [#5985](https://github.com/rust-lang/cargo/pull/5985)
+
+### Fixed
+- Fixed error when creating a package with an edition field in `Cargo.toml`.
+  [#5908](https://github.com/rust-lang/cargo/pull/5908)
+- More consistently use relative paths for path dependencies in a workspace.
+  [#5935](https://github.com/rust-lang/cargo/pull/5935)
+- `cargo fix` now always runs, even if it was run previously.
+  [#5944](https://github.com/rust-lang/cargo/pull/5944)
+- Windows: Attempt to more reliably detect terminal width. msys-based
+  terminals are forced to 60 characters wide.
+  [#6010](https://github.com/rust-lang/cargo/pull/6010)
+- Allow multiple target flags with `cargo doc --document-private-items`.
+  [#6022](https://github.com/rust-lang/cargo/pull/6022)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000000..44e74195251
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,202 @@
+# Contributing to Cargo
+
+Thank you for your interest in contributing to Cargo! Good places to
+start are this document, [ARCHITECTURE.md](ARCHITECTURE.md), which
+describes the high-level structure of Cargo, and [E-easy] bugs on the
+issue tracker.
+
+If you have a general question about Cargo or its internals, feel free to ask
+on [Discord].
+
+## Code of Conduct
+
+All contributors are expected to follow our [Code of Conduct].
+
+## Bug reports
+
+We can't fix what we don't know about, so please report problems liberally. This
+includes problems with understanding the documentation, unhelpful error messages,
+and unexpected behavior.
+
+**If you think that you have identified an issue with Cargo that might compromise
+its users' security, please do not open a public issue on GitHub.
+Instead, we ask you to refer to Rust's [security policy].**
+
+Opening an issue is as easy as following [this link][new-issues] and filling out
+the fields. Here's a template that you can use to file an issue, though it's not
+necessary to use it exactly:
+
+    I tried this:
+
+    I expected to see this happen:
+
+    Instead, this happened:
+
+    I'm using
+
+All three components are important: what you did, what you expected, what
+happened instead. Please use https://gist.github.com/ if your examples run long.
+
+
+## Feature requests
+
+Cargo follows the general Rust model of evolution. All major features go through
+an RFC process. Therefore, before opening a feature request issue, create a
+Pre-RFC thread on the [internals][irlo] forum to get preliminary feedback.
+Implementing a feature as a [custom subcommand][subcommands] is encouraged as it
+helps demonstrate the demand for the functionality and is a great way to deliver
+a working solution faster, as it can iterate outside of cargo's release cadence.
+
+## Working on issues
+
+If you're looking for somewhere to start, check out the [E-easy][E-Easy] and
+[E-mentor][E-mentor] tags.
+
+Feel free to ask for guidelines on how to tackle a problem on [Discord] or open a
+[new issue][new-issues]. This is especially important if you want to add new
+features to Cargo or make large changes to the already existing codebase.
+Cargo's core developers will do their best to provide help.
+
+If you start working on an already-filed issue, post a comment on that issue to
+let people know that somebody is working on it. Feel free to ask for comments if
+you are unsure about the solution you would like to submit.
+
+While Cargo does make use of some Rust features available only through the
+`nightly` toolchain, it must compile on stable Rust. Code added to Cargo
+is encouraged to make use of the latest stable features of the language and
+`stdlib`.
+
+We use the "fork and pull" model [described here][development-models], where
+contributors push changes to their personal fork and create pull requests to
+bring those changes into the source repository. This process is partly
+automated: pull requests are made against Cargo's master branch, tested and
+reviewed. Once a change is approved to be merged, a friendly bot merges the
+changes into an internal branch, runs the full test suite on that branch
+and only then merges into master. This ensures that Cargo's master branch
+passes the test suite at all times.
+
+Your basic steps to get going:
+
+* Fork Cargo and create a branch from master for the issue you are working on.
+* Please adhere to the code style that you see around the location you are
+working on.
+* [Commit as you go][githelp].
+* Include tests that cover all non-trivial code. The existing tests
+in `tests/` provide templates on how to test Cargo's behavior in a
+sandbox environment. The internal module `testsuite/support` provides a vast number
+of helpers to minimize boilerplate. See [`testsuite/support/mod.rs`] for an
+introduction to writing tests.
+* Make sure `cargo test` passes. If you do not have the cross-compilers
+installed locally, install them using the instructions returned by
+`cargo test cross_compile::cross_tests` (twice, with `--toolchain nightly`
+added to get the nightly cross target too); alternatively, just
+ignore the cross-compile test failures or disable them by
+using `CFG_DISABLE_CROSS_TESTS=1 cargo test`. Note that some tests are enabled
+only on the `nightly` toolchain. If you can, test both toolchains.
+* All code changes are expected to comply with the formatting suggested by `rustfmt`.
+You can use `rustup component add --toolchain nightly rustfmt` to install `rustfmt` and use
+`rustfmt +nightly --unstable-features --skip-children` on the changed files to automatically format your code.
+* Push your commits to GitHub and create a pull request against Cargo's
+`master` branch.
+
+## Pull requests
+
+After the pull request is made, a friendly bot will automatically assign a
+reviewer; the review process will make sure that the proposed changes are
+sound. Please give the assigned reviewer sufficient time, especially during
+weekends. If you don't get a reply, you may poke the core developers on [Discord].
+
+A merge of Cargo's master branch and your changes is immediately queued
+to be tested after the pull request is made. In case unforeseen
+problems are discovered during this step (e.g., a failure on a platform you
+originally did not develop on), you may ask for guidance. Push additional
+commits to your branch to tackle these problems.
+
+The reviewer might point out changes deemed necessary. Please add them as
+extra commits; this ensures that the reviewer can see what has changed since
+the code was previously reviewed. Large or tricky changes may require several
+passes of review and changes.
+
+Once the reviewer approves your pull request, a friendly bot picks it up
+and [merges][mergequeue] it into Cargo's `master` branch.
+
+## Contributing to the documentation
+
+To contribute to the documentation, all you need to do is change the markdown
+files in the `src/doc` directory. To view the rendered version of changes you
+have made locally, make sure you have `mdbook` installed and run:
+
+```sh
+cd src/doc
+mdbook build
+open book/index.html
+```
+
+To install `mdbook` run `cargo install mdbook`.
+
+
+## Issue Triage
+
+Sometimes an issue will stay open, even though the bug has been fixed. And
+sometimes, the original bug may go stale because something has changed in the
+meantime.
+
+It can be helpful to go through older bug reports and make sure that they are
+still valid. Load up an older issue, double-check that it's still true, and
+leave a comment letting us know if it is or is not. The [least recently
+updated sort][lru] is good for finding issues like this.
+
+Contributors with sufficient permissions on the Rust repository can help by
+adding labels to triage issues:
+
+* Yellow, **A**-prefixed labels state which **area** of the project an issue
+  relates to.
+
+* Magenta, **B**-prefixed labels identify bugs which are **blockers**.
+
+* Light purple, **C**-prefixed labels represent the **category** of an issue.
+  In particular, **C-feature-request** marks *proposals* for new features. If
+  an issue is **C-feature-request**, but is not **Feature accepted** or **I-nominated**,
+  then it was not thoroughly discussed, and might need some additional design
+  or perhaps should be implemented as an external subcommand first. Ping
+  @rust-lang/cargo if you want to send a PR for such an issue.
+
+* Dark purple, **Command**-prefixed labels mean the issue has to do with a
+  specific cargo command.
+
+* Green, **E**-prefixed labels explain the level of **experience** or
+  **effort** necessary to fix the issue. [**E-mentor**][E-mentor] issues also
+  have some instructions on how to get started.
+
+* Red, **I**-prefixed labels indicate the **importance** of the issue. The
+  **[I-nominated][]** label indicates that an issue has been nominated for
+  prioritizing at the next triage meeting.
+ +* Purple gray, **O**-prefixed labels are the **operating system** or platform + that this issue is specific to. + +* Orange, **P**-prefixed labels indicate a bug's **priority**. These labels + are only assigned during triage meetings and replace the **[I-nominated][]** + label. + +* The light orange **relnotes** label marks issues that should be documented in + the release notes of the next release. + + +[githelp]: https://dont-be-afraid-to-commit.readthedocs.io/en/latest/git/commandlinegit.html +[development-models]: https://help.github.com/articles/about-collaborative-development-models/ +[gist]: https://gist.github.com/ +[new-issues]: https://github.com/rust-lang/cargo/issues/new +[mergequeue]: https://buildbot2.rust-lang.org/homu/queue/cargo +[security policy]: https://www.rust-lang.org/security.html +[lru]: https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-asc +[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy +[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor +[I-nominated]: https://github.com/rust-lang/cargo/labels/I-nominated +[Code of Conduct]: https://www.rust-lang.org/conduct.html +[Discord]: https://discordapp.com/invite/rust-lang +[`testsuite/support/mod.rs`]: https://github.com/rust-lang/cargo/blob/master/tests/testsuite/support/mod.rs +[irlo]: https://internals.rust-lang.org/ +[subcommands]: https://doc.rust-lang.org/cargo/reference/external-tools.html#custom-subcommands diff --git a/Cargo.lock b/Cargo.lock deleted file mode 100644 index a8ec3f854a6..00000000000 --- a/Cargo.lock +++ /dev/null @@ -1,418 +0,0 @@ -[root] -name = "cargo" -version = "0.5.0" -dependencies = [ - "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "bufstream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crates-io 0.1.0", - "curl 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "docopt 0.6.70 (registry+https://github.com/rust-lang/crates.io-index)", - "env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "filetime 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "git2-curl 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", - "glob 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "hamcrest 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", - "semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)", - "tar 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "tempdir 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "term 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", - "threadpool 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.1.22 
(registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.37 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "advapi32-sys" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "aho-corasick" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "memchr 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bitflags" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "bufstream" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "cmake" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "gcc 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "crates-io" -version = "0.1.0" -dependencies = [ - "curl 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "curl" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "curl-sys 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.37 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "curl-sys" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "gcc 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "docopt" -version = "0.6.70" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "regex 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", - "strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "env_logger" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "filetime" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "flate2" -version = "0.2.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "miniz-sys 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "gcc" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "git2" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bitflags 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.37 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "git2-curl" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "curl 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "url 0.2.37 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "glob" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "hamcrest" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "num 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "kernel32-sys" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libc" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "libgit2-sys" -version = "0.2.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libssh2-sys 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libressl-pnacl-sys" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "pnacl-build-helper 1.4.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libssh2-sys" -version = "0.1.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cmake 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libz-sys" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - 
"gcc 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "log" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "matches" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "memchr" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "miniz-sys" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "gcc 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num_cpus" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "openssl-sys" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "gcc 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libressl-pnacl-sys 2.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "pkg-config 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "pkg-config" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "pnacl-build-helper" -version = "1.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "tempdir 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rand" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "regex" -version = "0.1.41" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "aho-corasick 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "regex-syntax" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "rustc-serialize" -version = "0.3.16" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "semver" -version = "0.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "strsim" -version = "0.3.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "tar" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "filetime 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tempdir" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rand 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "term" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "threadpool" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "time" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "kernel32-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "toml" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "url" -version = "0.2.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "winapi" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - diff --git a/Cargo.toml b/Cargo.toml index 5c360c81df9..2fce01b9e04 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,13 +1,14 @@ -[project] +[package] name = "cargo" -version = "0.5.0" +version = "0.39.0" +edition = "2018" authors = ["Yehuda Katz ", "Carl Lerche ", "Alex Crichton "] -license = "MIT/Apache-2.0" +license = "MIT OR Apache-2.0" homepage = "https://crates.io" repository = "https://github.com/rust-lang/cargo" -documentation = "http://doc.crates.io" +documentation = "https://docs.rs/cargo" description = """ Cargo, a package manager for Rust. 
""" @@ -17,45 +18,97 @@ name = "cargo" path = "src/cargo/lib.rs" [dependencies] -advapi32-sys = "0.1" -curl = "0.2" -docopt = "0.6" -env_logger = "0.3" -filetime = "0.1" -flate2 = "0.2" -git2 = "0.2" -git2-curl = "0.2" -glob = "0.2" -kernel32-sys = "0.1" -libc = "0.1" -libgit2-sys = "0.2" -log = "0.3" -num_cpus = "0.2" -regex = "0.1" -crates-io = { path = "src/crates-io", version = "0.1" } -rustc-serialize = "0.3" -semver = "0.1" -tar = "0.3" -term = "0.2" -threadpool = "0.1" -time = "0.1" -toml = "0.1" -url = "0.2" -winapi = "0.2" +atty = "0.2" +bytesize = "1.0" +crates-io = { path = "crates/crates-io", version = "0.27" } +crossbeam-utils = "0.6" +crypto-hash = "0.3.1" +curl = { version = "0.4.21", features = ['http2'] } +curl-sys = "0.4.18" +env_logger = "0.6.0" +pretty_env_logger = { version = "0.3", optional = true } +failure = "0.1.5" +filetime = "0.2" +flate2 = { version = "1.0.3", features = ['zlib'] } +fs2 = "0.4" +git2 = "0.9.2" +git2-curl = "0.10.1" +glob = "0.3.0" +hex = "0.3" +home = "0.3" +ignore = "0.4.7" +lazy_static = "1.2.0" +jobserver = "0.1.13" +lazycell = "1.2.0" +libc = "0.2" +log = "0.4.6" +libgit2-sys = "0.8.2" +memchr = "2.1.3" +num_cpus = "1.0" +opener = "0.4" +percent-encoding = "2.0" +remove_dir_all = "0.5.2" +rustfix = "0.4.4" +same-file = "1" +semver = { version = "0.9.0", features = ["serde"] } +serde = { version = "1.0.82", features = ['derive'] } +serde_ignored = "0.0.4" +serde_json = { version = "1.0.30", features = ["raw_value"] } +shell-escape = "0.1.4" +strip-ansi-escapes = "0.1.0" +tar = { version = "0.4.18", default-features = false } +tempfile = "3.0" +termcolor = "1.0" +toml = "0.5.0" +url = { version = "2.0", features = ['serde'] } +walkdir = "2.2" +clap = "2.31.2" +unicode-width = "0.1.5" +openssl = { version = '0.10.11', optional = true } +im-rc = "13.0.0" + +# A noop dependency that changes in the Rust repository, it's a bit of a hack. +# See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust` +# for more information. +rustc-workspace-hack = "1.0.0" + +[target.'cfg(target_os = "macos")'.dependencies] +core-foundation = { version = "0.6.0", features = ["mac_os_10_7_support"] } + +[target.'cfg(windows)'.dependencies] +miow = "0.3.1" +fwdansi = "1" + +[target.'cfg(windows)'.dependencies.winapi] +version = "0.3" +features = [ + "basetsd", + "handleapi", + "jobapi", + "jobapi2", + "memoryapi", + "minwindef", + "ntdef", + "ntstatus", + "processenv", + "processthreadsapi", + "psapi", + "synchapi", + "winerror", + "winbase", + "wincon", + "winnt", +] [dev-dependencies] -tempdir = "0.3" -hamcrest = "0.1" -bufstream = "0.1" -filetime = "0.1" +cargo-test-macro = { path = "crates/cargo-test-macro", version = "0.1.0" } [[bin]] name = "cargo" test = false doc = false -[[test]] -name = "tests" - -[[test]] -name = "resolve" +[features] +deny-warnings = [] +vendored-openssl = ['openssl/vendored'] +pretty-env-logger = ['pretty_env_logger'] diff --git a/LICENSE-APACHE b/LICENSE-APACHE index 16fe87b06e8..c98d27d4f32 100644 --- a/LICENSE-APACHE +++ b/LICENSE-APACHE @@ -1,6 +1,6 @@ Apache License Version 2.0, January 2004 - http://www.apache.org/licenses/ + https://www.apache.org/licenses/LICENSE-2.0 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +192,7 @@ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/LICENSE-MIT b/LICENSE-MIT index 39d4bdb5acd..31aa79387f2 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,5 +1,3 @@ -Copyright (c) 2014 The Rust Project Developers - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the diff --git a/LICENSE-THIRD-PARTY b/LICENSE-THIRD-PARTY index 42db8c192b3..8f83ab502aa 100644 --- a/LICENSE-THIRD-PARTY +++ b/LICENSE-THIRD-PARTY @@ -3,7 +3,7 @@ depends on a number of libraries which carry their own copyright notices and license terms. These libraries are normally all linked static into the binary distributions of Cargo: -* OpenSSL - http://www.openssl.org/source/license.html +* OpenSSL - https://www.openssl.org/source/license.html Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. @@ -22,7 +22,7 @@ distributions of Cargo: 3. All advertising materials mentioning features or use of this software must display the following acknowledgment: "This product includes software developed by the OpenSSL Project - for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + for use in the OpenSSL Toolkit. (https://www.openssl.org/)" 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to endorse or promote products derived from this software without @@ -36,7 +36,7 @@ distributions of Cargo: 6. Redistributions of any form whatsoever must retain the following acknowledgment: "This product includes software developed by the OpenSSL Project - for use in the OpenSSL Toolkit (http://www.openssl.org/)" + for use in the OpenSSL Toolkit (https://www.openssl.org/)" THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE @@ -589,7 +589,7 @@ distributions of Cargo: that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. - + Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a @@ -645,7 +645,7 @@ distributions of Cargo: "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. - + GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION @@ -692,7 +692,7 @@ distributions of Cargo: You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. - + 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 @@ -750,7 +750,7 @@ distributions of Cargo: ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. 
- + Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. @@ -801,7 +801,7 @@ distributions of Cargo: distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. - + 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work @@ -863,7 +863,7 @@ distributions of Cargo: accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. - + 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined @@ -904,7 +904,7 @@ distributions of Cargo: restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. - + 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or @@ -956,7 +956,7 @@ distributions of Cargo: the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. - + 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is @@ -1037,7 +1037,7 @@ distributions of Cargo: ---------------------------------------------------------------------- -* libssh2 - http://www.libssh2.org/license.html +* libssh2 - https://www.libssh2.org/license.html Copyright (c) 2004-2007 Sara Golemon Copyright (c) 2005,2006 Mikhail Gusarov @@ -1080,7 +1080,7 @@ distributions of Cargo: USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -* libcurl - http://curl.haxx.se/docs/copyright.html +* libcurl - https://curl.haxx.se/docs/copyright.html COPYRIGHT AND PERMISSION NOTICE @@ -1268,5 +1268,5 @@ distributions of Cargo: ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- For more information, please refer to + For more information, please refer to diff --git a/Makefile.in b/Makefile.in deleted file mode 100644 index 0dd5c2c88a1..00000000000 --- a/Makefile.in +++ /dev/null @@ -1,234 +0,0 @@ -CFG_RELEASE_NUM=0.5.0 -CFG_RELEASE_LABEL= - -include config.mk - -ifneq ($(CFG_LOCAL_RUST_ROOT),) -export LD_LIBRARY_PATH := $(CFG_LOCAL_RUST_ROOT)/lib:$(LD_LIBRARY_PATH) -export DYLD_LIBRARY_PATH := $(CFG_LOCAL_RUST_ROOT)/lib:$(DYLD_LIBRARY_PATH) -endif - -export PATH := $(dir $(CFG_RUSTC)):$(PATH) - -ifdef CFG_ENABLE_NIGHTLY -CFG_RELEASE=$(CFG_RELEASE_NUM)$(CFG_RELEASE_LABEL)-nightly -CFG_PACKAGE_VERS = nightly -else -CFG_RELEASE=$(CFG_RELEASE_NUM)$(CFG_RELEASE_LABEL) -CFG_PACKAGE_VERS=$(CFG_RELEASE) -endif -CFG_BUILD_DATE = $(shell date +%F) - -ifeq ($(wildcard .git),) -CFG_VERSION = $(CFG_RELEASE) (built $(CFG_BUILD_DATE)) -else -CFG_VER_DATE = $(shell git log -1 --date=short --pretty=format:'%cd') -CFG_VER_HASH = $(shell git rev-parse --short HEAD) -CFG_VERSION = $(CFG_RELEASE) ($(CFG_VER_HASH) $(CFG_VER_DATE)) -endif -PKG_NAME = cargo-$(CFG_PACKAGE_VERS) - -ifdef CFG_DISABLE_VERIFY_INSTALL -MAYBE_DISABLE_VERIFY=--disable-verify -else -MAYBE_DISABLE_VERIFY= -endif - -ifdef CFG_ENABLE_OPTIMIZE -OPT_FLAG=--release -else -OPT_FLAG= -endif - -ifdef VERBOSE -VERBOSE_FLAG=--verbose -else -VERBOSE_FLAG= -endif - -export CFG_VERSION -export CFG_DISABLE_CROSS_TESTS - -ifeq ($(OS),Windows_NT) -X = .exe -endif - -TARGET_ROOT = target -BIN_TARGETS := cargo -BIN_TARGETS := $(BIN_TARGETS:src/bin/%.rs=%) -BIN_TARGETS := $(filter-out cargo,$(BIN_TARGETS)) - -define DIST_TARGET -ifdef CFG_ENABLE_OPTIMIZE -TARGET_$(1) = $$(TARGET_ROOT)/$(1)/release -else -TARGET_$(1) = $$(TARGET_ROOT)/$(1)/debug -endif -DISTDIR_$(1) = $$(TARGET_$(1))/dist -IMGDIR_$(1) = $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1)-image -OVERLAYDIR_$(1) = $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1)-overlay -PKGDIR_$(1) = $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1) -BIN_TARGETS_$(1) := $$(BIN_TARGETS:%=$$(TARGET_$(1))/%$$(X)) -endef -$(foreach target,$(CFG_TARGET),$(eval $(call DIST_TARGET,$(target)))) - -ifdef CFG_LOCAL_CARGO -CARGO := $(CFG_LOCAL_CARGO) -else -CARGO := $(TARGET_ROOT)/snapshot/cargo/bin/cargo$(X) -endif - -all: $(foreach target,$(CFG_TARGET),cargo-$(target)) - -define CARGO_TARGET -cargo-$(1): $$(CARGO) - $$(CFG_RUSTC) -V - $$(CARGO) --version - $$(CARGO) build --target $(1) $$(OPT_FLAG) $$(VERBOSE_FLAG) $$(ARGS) - -test-unit-$(1): $$(CARGO) - @mkdir -p target/$(1)/cit - $$(CARGO) test --target $(1) $$(VERBOSE_FLAG) $$(only) -endef -$(foreach target,$(CFG_TARGET),$(eval $(call CARGO_TARGET,$(target)))) - -$(TARGET_ROOT)/snapshot/cargo/bin/cargo$(X): src/snapshots.txt - $(CFG_PYTHON) src/etc/dl-snapshot.py $(CFG_BUILD) - touch $@ - - -# === Tests - -test: style no-exes $(foreach target,$(CFG_TARGET),test-unit-$(target)) - -style: - sh tests/check-style.sh - -no-exes: - find $$(git ls-files) -perm +a+x -type f \ - -not -name configure -not -name '*.sh' -not -name '*.rs' \ - -not -wholename "*/rust-installer/*" | \ - grep '.*' \ - && exit 1 || exit 0 - -# === Misc - -clean-all: clean -clean: - rm -rf $(TARGET_ROOT) - -# === Documentation - -DOCS := index faq config guide manifest build-script pkgid-spec crates-io \ - environment-variables -DOC_DIR := target/doc -DOC_OPTS := --markdown-no-toc \ - --markdown-css stylesheets/normalize.css \ - --markdown-css stylesheets/all.css \ - --markdown-css stylesheets/prism.css \ - --html-before-content src/doc/header.html \ - --html-after-content src/doc/footer.html -ASSETS := CNAME 
images/noise.png images/forkme.png images/Cargo-Logo-Small.png \ - stylesheets/all.css stylesheets/normalize.css javascripts/prism.js \ - javascripts/all.js stylesheets/prism.css images/circle-with-i.png \ - images/search.png images/org-level-acl.png images/auth-level-acl.png - -doc: $(foreach doc,$(DOCS),target/doc/$(doc).html) \ - $(foreach asset,$(ASSETS),target/doc/$(asset)) \ - target/doc/cargo/index.html - -target/doc/cargo/index.html: - $(CARGO) doc --no-deps - -$(DOC_DIR)/%.html: src/doc/%.md src/doc/header.html src/doc/footer.html - @mkdir -p $(@D) - $(CFG_RUSTDOC) $< -o $(@D) $(DOC_OPTS) - -$(DOC_DIR)/%: src/doc/% - @mkdir -p $(@D) - cp $< $@ - -# === Distribution - -define DO_DIST_TARGET -dist-$(1): $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz - -# One may wonder why some of the commands here are prefixed with `cd -P .`, and -# that's a good question! On some of the windows bots, PWD has a windows-style -# path, and that ends up choking the installation script in various ways. -# Prefixing commands with this `cd -P .` helps the bots to set the right PWD env -# var. -distcheck-$(1): dist-$(1) - rm -rf $$(TARGET_$(1))/distcheck - mkdir -p $$(TARGET_$(1))/distcheck - (cd $$(TARGET_$(1))/distcheck && tar xf ../dist/$$(PKG_NAME)-$(1).tar.gz) - cd -P . && $$(TARGET_$(1))/distcheck/$$(PKG_NAME)-$(1)/install.sh \ - --prefix=$$(TARGET_$(1))/distcheck/install - $$(TARGET_$(1))/distcheck/install/bin/cargo -V > /dev/null - cd -P . && $$(TARGET_$(1))/distcheck/$$(PKG_NAME)-$(1)/install.sh \ - --prefix=$$(TARGET_$(1))/distcheck/install --uninstall - [ -f $$(TARGET_$(1))/distcheck/install/bin/cargo$(X) ] && exit 1 || exit 0 - -prepare-image-$(1): - @[ -f $$(TARGET_$(1))/cargo$$(X) ] || echo 'Please run `make` first' - @[ -f $$(TARGET_$(1))/cargo$$(X) ] - rm -rf $$(IMGDIR_$(1)) - mkdir -p $$(IMGDIR_$(1))/bin $$(IMGDIR_$(1))/lib/cargo \ - $$(IMGDIR_$(1))/share/man/man1 \ - $$(IMGDIR_$(1))/share/doc/cargo \ - $$(IMGDIR_$(1))/share/zsh/site-functions \ - $$(IMGDIR_$(1))/etc/bash_completion.d - cp $$(TARGET_$(1))/cargo$$(X) $$(IMGDIR_$(1))/bin - cp src/etc/cargo.1 $$(IMGDIR_$(1))/share/man/man1 - cp src/etc/_cargo $$(IMGDIR_$(1))/share/zsh/site-functions/_cargo - cp src/etc/cargo.bashcomp.sh $$(IMGDIR_$(1))/etc/bash_completion.d/cargo - cp README.md LICENSE-MIT LICENSE-APACHE LICENSE-THIRD-PARTY \ - $$(IMGDIR_$(1))/share/doc/cargo - -prepare-overlay-$(1): - rm -Rf $$(OVERLAYDIR_$(1)) - mkdir -p $$(OVERLAYDIR_$(1)) - cp README.md LICENSE-MIT LICENSE-APACHE LICENSE-THIRD-PARTY \ - $$(OVERLAYDIR_$(1)) - echo "$(CFG_VERSION)" > $$(OVERLAYDIR_$(1))/version - -$$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz: prepare-image-$(1) prepare-overlay-$(1) - sh src/rust-installer/gen-installer.sh \ - --product-name=Rust \ - --rel-manifest-dir=rustlib \ - --success-message=Rust-is-ready-to-roll. 
\ - --image-dir=$$(IMGDIR_$(1)) \ - --work-dir=./$$(DISTDIR_$(1)) \ - --output-dir=./$$(DISTDIR_$(1)) \ - --non-installed-overlay=$$(OVERLAYDIR_$(1)) \ - --package-name=$$(PKG_NAME)-$(1) \ - --component-name=cargo \ - --legacy-manifest-dirs=rustlib,cargo - rm -Rf $$(IMGDIR_$(1)) - -install-$(1): $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz - $$(PKGDIR_$(1))/install.sh \ - --prefix="$$(CFG_PREFIX)" \ - --destdir="$$(DESTDIR)" $$(MAYBE_DISABLE_VERIFY) - -uninstall-$(1): $$(DISTDIR_$(1))/$$(PKG_NAME)-$(1).tar.gz - $$(PKGDIR_$(1))/install.sh \ - --prefix="$$(CFG_PREFIX)" \ - --destdir="$$(DESTDIR)" \ - --uninstall -endef -$(foreach target,$(CFG_TARGET),$(eval $(call DO_DIST_TARGET,$(target)))) - -dist: $(foreach target,$(CFG_TARGET),dist-$(target)) -distcheck: $(foreach target,$(CFG_TARGET),distcheck-$(target)) -install: $(foreach target,$(CFG_TARGET),install-$(target)) -uninstall: $(foreach target,$(CFG_TARGET), uninstall-$(target)) - - -# Setup phony tasks -.PHONY: all clean clean-all dist distcheck install uninstall test test-unit style - -# Disable unnecessary built-in rules -.SUFFIXES: - - diff --git a/README.md b/README.md index f86c6a93a90..813c44f4e28 100644 --- a/README.md +++ b/README.md @@ -1,56 +1,41 @@ +# Cargo + Cargo downloads your Rust project’s dependencies and compiles your project. -Learn more at http://doc.crates.io/ +Learn more at https://doc.rust-lang.org/cargo/ -## Installing Cargo +## Code Status -Cargo is distributed by default with Rust, so if you've got `rustc` installed -locally you probably also have `cargo` installed locally. +[![Build Status](https://dev.azure.com/rust-lang/cargo/_apis/build/status/rust-lang.cargo?branchName=master)](https://dev.azure.com/rust-lang/cargo/_build/latest?definitionId=18&branchName=master) -If, however, you would like to install Cargo from the nightly binaries that are -generated, you may also do so! Note that these nightlies are not official -binaries, so they are only provided in one format with one installation method. -Each tarball below contains a top-level `install.sh` script to install Cargo. +Code documentation: https://docs.rs/cargo/ -* [`x86_64-unknown-linux-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-unknown-linux-gnu.tar.gz) -* [`i686-unknown-linux-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-unknown-linux-gnu.tar.gz) -* [`x86_64-apple-darwin`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-apple-darwin.tar.gz) -* [`i686-apple-darwin`](https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-apple-darwin.tar.gz) -* [`x86_64-pc-windows-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-pc-windows-gnu.tar.gz) -* [`i686-pc-windows-gnu`](https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-pc-windows-gnu.tar.gz) -* [`x86_64-pc-windows-msvc`](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-pc-windows-msvc.tar.gz) +## Installing Cargo -Note that if you're on Windows you will have to run the `install.sh` script from -inside an MSYS shell, likely from a MinGW-64 installation. +Cargo is distributed by default with Rust, so if you've got `rustc` installed +locally you probably also have `cargo` installed locally. 
## Compiling from Source Cargo requires the following tools and packages to build: -* `rustc` +* `git` * `python` * `curl` (on Unix) -* `cmake` * OpenSSL headers (only for Unix, this is the `libssl-dev` package on ubuntu) +* `cargo` and `rustc` -Cargo can then be compiled like many other standard unix-like projects: +First, you'll want to check out this repository -```sh +``` git clone https://github.com/rust-lang/cargo cd cargo -git submodule update --init -python -B src/etc/install-deps.py -./configure --local-rust-root="$PWD"/rustc -make -make install ``` -More options can be discovered through `./configure`, such as compiling cargo -for more than one target. For example, if you'd like to compile both 32 and 64 -bit versions of cargo on unix you would use: +With `cargo` already installed, you can simply run: ``` -$ ./configure --target=i686-unknown-linux-gnu,x86_64-unknown-linux-gnu +cargo build --release ``` ## Adding new subcommands to Cargo @@ -61,19 +46,29 @@ a list of known community-developed subcommands. [third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands -## Contributing to the Docs -To contribute to the docs, all you need to do is change the markdown files in -the `src/doc` directory. +## Releases + +Cargo releases coincide with Rust releases. +High level release notes are available as part of [Rust's release notes][rel]. +Detailed release notes are available in this repo at [CHANGELOG.md]. + +[rel]: https://github.com/rust-lang/rust/blob/master/RELEASES.md +[CHANGELOG.md]: CHANGELOG.md -## Reporting Issues +## Reporting issues Found a bug? We'd love to know about it! -Please report all issues on the github [issue tracker][issues]. +Please report all issues on the GitHub [issue tracker][issues]. [issues]: https://github.com/rust-lang/cargo/issues +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md). You may also find the architecture +documentation useful ([ARCHITECTURE.md](ARCHITECTURE.md)). + ## License Cargo is primarily distributed under the terms of both the MIT license @@ -84,11 +79,13 @@ See LICENSE-APACHE and LICENSE-MIT for details. ### Third party software This product includes software developed by the OpenSSL Project -for use in the OpenSSL Toolkit (http://www.openssl.org/). +for use in the OpenSSL Toolkit (https://www.openssl.org/). In binary form, this product includes software that is licensed under the terms of the GNU General Public License, version 2, with a linking exception, which can be obtained from the [upstream repository][1]. +See LICENSE-THIRD-PARTY for details. 
+ [1]: https://github.com/libgit2/libgit2 diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 18fc5f2fd5a..00000000000 --- a/appveyor.yml +++ /dev/null @@ -1,31 +0,0 @@ -environment: - CFG_DISABLE_CROSS_TESTS: 1 - matrix: - - MSVC: 1 - BITS: 32 - TARGET: i686-pc-windows-msvc - ARCH: x86 - NEEDS_LIBGCC: 1 - - MSVC: 1 - BITS: 64 - TARGET: x86_64-pc-windows-msvc - ARCH: amd64 - -install: - - python src/etc/install-deps.py - - python src/etc/dl-snapshot.py %TARGET% - - call "C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat" %ARCH% - - SET PATH=%PATH%;%cd%/rustc/bin - - SET PATH=%PATH%;%cd%/target/snapshot/cargo/bin - - if defined NEEDS_LIBGCC set PATH=%PATH%;C:\MinGW\bin - - rustc -V - - cargo -V - -build: false - -test_script: - - cargo test -- --nocapture - -branches: - only: - - master diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000000..5bb3f65094f --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,91 @@ +trigger: + branches: + include: + - '*' + exclude: + - master +pr: +- master + +jobs: +- job: Linux + pool: + vmImage: ubuntu-16.04 + steps: + - template: ci/azure-test-all.yml + strategy: + matrix: + stable: + TOOLCHAIN: stable + beta: + TOOLCHAIN: beta + nightly: + TOOLCHAIN: nightly + variables: + OTHER_TARGET: i686-unknown-linux-gnu + +- job: macOS + pool: + vmImage: macos-10.13 + steps: + - template: ci/azure-test-all.yml + variables: + TOOLCHAIN: stable + OTHER_TARGET: i686-apple-darwin + +- job: Windows + pool: + vmImage: windows-2019 + steps: + - template: ci/azure-test-all.yml + strategy: + matrix: + x86_64-msvc: + TOOLCHAIN: stable-x86_64-pc-windows-msvc + OTHER_TARGET: i686-pc-windows-msvc +- job: rustfmt + pool: + vmImage: ubuntu-16.04 + steps: + - template: ci/azure-install-rust.yml + - bash: rustup component add rustfmt + displayName: "Install rustfmt" + - bash: cargo fmt --all -- --check + displayName: "Check rustfmt (cargo)" + - bash: cd crates/cargo-test-macro && cargo fmt --all -- --check + displayName: "Check rustfmt (cargo-test-macro)" + - bash: cd crates/crates-io && cargo fmt --all -- --check + displayName: "Check rustfmt (crates-io)" + - bash: cd crates/resolver-tests && cargo fmt --all -- --check + displayName: "Check rustfmt (resolver-tests)" + variables: + TOOLCHAIN: stable + +- job: resolver + pool: + vmImage: ubuntu-16.04 + steps: + - template: ci/azure-install-rust.yml + - bash: cargo test --manifest-path crates/resolver-tests/Cargo.toml + displayName: "Resolver tests" + variables: + TOOLCHAIN: stable + +- job: docs + pool: + vmImage: ubuntu-16.04 + steps: + - template: ci/azure-install-rust.yml + - bash: | + set -e + mkdir mdbook + curl -Lf https://github.com/rust-lang-nursery/mdBook/releases/download/v0.3.1/mdbook-v0.3.1-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook + echo "##vso[task.prependpath]`pwd`/mdbook" + displayName: "Install mdbook" + - bash: cargo doc --no-deps + displayName: "Build documentation" + - bash: cd src/doc && mdbook build --dest-dir ../../target/doc + displayName: "Build mdbook documentation" + variables: + TOOLCHAIN: stable + diff --git a/ci/azure-install-rust.yml b/ci/azure-install-rust.yml new file mode 100644 index 00000000000..c48d0d0155f --- /dev/null +++ b/ci/azure-install-rust.yml @@ -0,0 +1,28 @@ +steps: + - bash: | + set -e + if command -v rustup; then + echo `command -v rustup` `rustup -V` already installed + rustup self update + elif [ "$AGENT_OS" = "Windows_NT" ]; then + curl -sSf -o rustup-init.exe https://win.rustup.rs + 
rustup-init.exe -y --default-toolchain $TOOLCHAIN + echo "##vso[task.prependpath]$USERPROFILE/.cargo/bin" + else + curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $TOOLCHAIN + echo "##vso[task.prependpath]$HOME/.cargo/bin" + fi + displayName: Install rustup + + - bash: | + set -e + rustup update $TOOLCHAIN + rustup default $TOOLCHAIN + displayName: Install rust + + - bash: | + set -ex + rustup -V + rustc -Vv + cargo -V + displayName: Query rust and cargo versions diff --git a/ci/azure-test-all.yml b/ci/azure-test-all.yml new file mode 100644 index 00000000000..626858431e8 --- /dev/null +++ b/ci/azure-test-all.yml @@ -0,0 +1,28 @@ +steps: +- checkout: self + fetchDepth: 1 + +- template: azure-install-rust.yml + +- bash: rustup target add $OTHER_TARGET + displayName: "Install cross-compile target" + +- bash: sudo apt install gcc-multilib + displayName: "Install gcc-multilib (linux)" + condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) + +# Some tests rely on a clippy command to run, so let's try to install clippy to +# we can be sure to run those tests. +- bash: rustup component add clippy || echo "clippy not available" + displayName: "Install clippy (maybe)" + +# Deny warnings on CI to avoid warnings getting into the codebase, and note the +# `force-system-lib-on-osx` which is intended to fix compile issues on OSX where +# compiling curl from source on OSX yields linker errors on Azure. +# +# Note that the curl issue is traced back to alexcrichton/curl-rust#279 where it +# looks like the OSX version we're actually running on is such that a symbol is +# emitted that's never worked. For now force the system library to be used to +# fix the link errors. +- bash: cargo test --features 'deny-warnings curl/force-system-lib-on-osx' + displayName: "cargo test" diff --git a/configure b/configure deleted file mode 100755 index 5c2f8b3e235..00000000000 --- a/configure +++ /dev/null @@ -1,410 +0,0 @@ -#!/bin/sh - -msg() { - echo "configure: $1" -} - -step_msg() { - msg - msg "$1" - msg -} - -warn() { - echo "configure: WARNING: $1" -} - -err() { - echo "configure: error: $1" - exit 1 -} - -need_ok() { - if [ $? -ne 0 ] - then - err "$1" - fi -} - -need_cmd() { - if command -v $1 >/dev/null 2>&1 - then msg "found $1" - else err "need $1" - fi -} - -make_dir() { - if [ ! -d $1 ] - then - msg "mkdir -p $1" - mkdir -p $1 - fi -} - -copy_if_changed() { - if cmp -s $1 $2 - then - msg "leaving $2 unchanged" - else - msg "cp $1 $2" - cp -f $1 $2 - chmod u-w $2 # make copied artifact read-only - fi -} - -move_if_changed() { - if cmp -s $1 $2 - then - msg "leaving $2 unchanged" - else - msg "mv $1 $2" - mv -f $1 $2 - chmod u-w $2 # make moved artifact read-only - fi -} - -putvar() { - local T - eval T=\$$1 - eval TLEN=\${#$1} - if [ $TLEN -gt 35 ] - then - printf "configure: %-20s := %.35s ...\n" $1 "$T" - else - printf "configure: %-20s := %s %s\n" $1 "$T" "$2" - fi - printf "%-20s := %s\n" $1 "$T" >>config.tmp -} - -probe() { - local V=$1 - shift - local P - local T - for P - do - T=$(command -v $P 2>&1) - if [ $? -eq 0 ] - then - VER0=$($P --version 2>/dev/null | head -1 \ - | sed -e 's/[^0-9]*\([vV]\?[0-9.]\+[^ ]*\).*/\1/' ) - if [ $? 
-eq 0 -a "x${VER0}" != "x" ] - then - VER="($VER0)" - else - VER="" - fi - break - else - VER="" - T="" - fi - done - eval $V=\$T - putvar $V "$VER" -} - -probe_need() { - local V=$1 - probe $* - eval VV=\$$V - if [ -z "$VV" ] - then - err "needed, but unable to find any of: $*" - fi -} - -validate_opt () { - for arg in $CFG_CONFIGURE_ARGS - do - isArgValid=0 - for option in $BOOL_OPTIONS - do - if test --disable-$option = $arg - then - isArgValid=1 - fi - if test --enable-$option = $arg - then - isArgValid=1 - fi - done - for option in $VAL_OPTIONS - do - if echo "$arg" | grep -q -- "--$option=" - then - isArgValid=1 - fi - done - if [ "$arg" = "--help" ] - then - echo - echo "No more help available for Configure options," - echo "check the Wiki or join our IRC channel" - break - else - if test $isArgValid -eq 0 - then - err "Option '$arg' is not recognized" - fi - fi - done -} - -valopt() { - VAL_OPTIONS="$VAL_OPTIONS $1" - - local OP=$1 - local DEFAULT=$2 - shift - shift - local DOC="$*" - if [ $HELP -eq 0 ] - then - local UOP=$(echo $OP | tr '[:lower:]' '[:upper:]' | tr '\-' '\_') - local V="CFG_${UOP}" - eval $V="$DEFAULT" - for arg in $CFG_CONFIGURE_ARGS - do - if echo "$arg" | grep -q -- "--$OP=" - then - val=$(echo "$arg" | cut -f2 -d=) - eval $V=$val - fi - done - putvar $V - else - if [ -z "$DEFAULT" ] - then - DEFAULT="" - fi - OP="${OP}=[${DEFAULT}]" - printf " --%-30s %s\n" "$OP" "$DOC" - fi -} - -opt() { - BOOL_OPTIONS="$BOOL_OPTIONS $1" - - local OP=$1 - local DEFAULT=$2 - shift - shift - local DOC="$*" - local FLAG="" - - if [ $DEFAULT -eq 0 ] - then - FLAG="enable" - else - FLAG="disable" - DOC="don't $DOC" - fi - - if [ $HELP -eq 0 ] - then - for arg in $CFG_CONFIGURE_ARGS - do - if [ "$arg" = "--${FLAG}-${OP}" ] - then - OP=$(echo $OP | tr 'a-z-' 'A-Z_') - FLAG=$(echo $FLAG | tr 'a-z' 'A-Z') - local V="CFG_${FLAG}_${OP}" - eval $V=1 - putvar $V - fi - done - else - if [ ! -z "$META" ] - then - OP="$OP=<$META>" - fi - printf " --%-30s %s\n" "$FLAG-$OP" "$DOC" - fi -} - -envopt() { - local NAME=$1 - local V="CFG_${NAME}" - eval VV=\$$V - - # If configure didn't set a value already, then check environment. - # - # (It is recommended that the configure script always check the - # environment before setting any values to envopt variables; see - # e.g. how CFG_CC is handled, where it first checks `-z "$CC"`, - # and issues msg if it ends up employing that provided value.) - if [ -z "$VV" ] - then - eval $V=\$$NAME - eval VV=\$$V - fi - - # If script or environment provided a value, save it. - if [ ! 
-z "$VV" ] - then - putvar $V - fi -} - -msg "looking for configure programs" -need_cmd cmp -need_cmd mkdir -need_cmd printf -need_cmd cut -need_cmd head -need_cmd grep -need_cmd xargs -need_cmd cp -need_cmd find -need_cmd uname -need_cmd date -need_cmd tr -need_cmd sed -need_cmd cmake -if [ "${OS}" != "Windows_NT" ]; then - need_cmd curl -fi - -CFG_SRC_DIR="$(cd $(dirname $0) && pwd)/" -CFG_BUILD_DIR="$(pwd)/" -CFG_SELF="$0" -CFG_CONFIGURE_ARGS="$@" - -OPTIONS="" -HELP=0 -if [ "$1" = "--help" ] -then - HELP=1 - shift - echo - echo "Usage: $CFG_SELF [options]" - echo - echo "Options:" - echo -else - msg "recreating config.tmp" - echo '' >config.tmp - - step_msg "processing $CFG_SELF args" -fi - -BOOL_OPTIONS="" -VAL_OPTIONS="" - -opt debug 1 "build with extra debug fun" -opt optimize 0 "build with optimizations" -opt nightly 0 "build nightly packages" -opt verify-install 1 "verify installed binaries work" -opt cross-tests 1 "run cross-compilation tests" -valopt prefix "/usr/local" "set installation prefix" -valopt local-rust-root "" "set prefix for local rust binary" - -if [ $HELP -eq 0 ]; then - if [ ! -z "${CFG_LOCAL_RUST_ROOT}" ]; then - export LD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$LD_LIBRARY_PATH" - export DYLD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$DYLD_LIBRARY_PATH" - LRV=`${CFG_LOCAL_RUST_ROOT}/bin/rustc --version` - if [ $? -eq 0 ]; then - step_msg "using rustc at: ${CFG_LOCAL_RUST_ROOT} with version: $LRV" - else - err "failed to run rustc at: ${CFG_LOCAL_RUST_ROOT}" - fi - CFG_RUSTC="${CFG_LOCAL_RUST_ROOT}/bin/rustc" - else - probe_need CFG_RUSTC rustc - fi - DEFAULT_BUILD=$("${CFG_RUSTC}" -vV | grep 'host: ' | sed 's/host: //') -fi - -valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple" -valopt host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples" -valopt target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples" - -valopt localstatedir "/var/lib" "local state directory" -valopt sysconfdir "/etc" "install system configuration files" -valopt datadir "${CFG_PREFIX}/share" "install data" -valopt infodir "${CFG_PREFIX}/share/info" "install additional info" -valopt mandir "${CFG_PREFIX}/share/man" "install man pages in PATH" -valopt libdir "${CFG_PREFIX}/lib" "install libraries" -valopt local-cargo "" "local cargo to bootstrap from" - -if [ $HELP -eq 1 ] -then - echo - exit 0 -fi - -# Validate Options -step_msg "validating $CFG_SELF args" -validate_opt - -step_msg "looking for build programs" - -probe_need CFG_CURLORWGET curl wget -probe_need CFG_PYTHON python -probe_need CFG_CC cc gcc clang - -if [ ! -z "${CFG_LOCAL_RUST_ROOT}" ]; then - CFG_RUSTDOC="${CFG_LOCAL_RUST_ROOT}/bin/rustdoc" -else - probe_need CFG_RUSTDOC rustdoc -fi - -# a little post-processing of various config values -CFG_PREFIX=${CFG_PREFIX%/} -CFG_MANDIR=${CFG_MANDIR%/} -CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')" -CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')" - -# copy host-triples to target-triples so that hosts are a subset of targets -V_TEMP="" -for i in $CFG_HOST $CFG_TARGET; -do - echo "$V_TEMP" | grep -qF $i || V_TEMP="$V_TEMP${V_TEMP:+ }$i" -done -CFG_TARGET=$V_TEMP - -if [ "$CFG_SRC_DIR" != "$CFG_BUILD_DIR" ]; then - err "cargo does not currently support an out-of-tree build dir" -fi - -if [ ! -z "$CFG_ENABLE_NIGHTLY" ]; then - if [ ! 
-f .cargo/config ]; then
-    mkdir -p .cargo
-    cat > .cargo/config <<-EOF
-[target.x86_64-unknown-linux-gnu.openssl]
-rustc-flags = "-l static=ssl -l static=crypto -l dl -L /home/rustbuild/root64/lib"
-root = "/home/rustbuild/root64"
-include = "/home/rustbuild/root64/include"
-
-[target.i686-unknown-linux-gnu.openssl]
-rustc-flags = "-l static=ssl -l static=crypto -l dl -L /home/rustbuild/root32/lib"
-root = "/home/rustbuild/root32"
-include = "/home/rustbuild/root32/include"
-EOF
-    fi
-fi
-
-step_msg "writing configuration"
-
-putvar CFG_SRC_DIR
-putvar CFG_BUILD_DIR
-putvar CFG_CONFIGURE_ARGS
-putvar CFG_PREFIX
-putvar CFG_BUILD
-putvar CFG_HOST
-putvar CFG_TARGET
-putvar CFG_LIBDIR
-putvar CFG_MANDIR
-putvar CFG_RUSTC
-putvar CFG_RUSTDOC
-
-msg
-copy_if_changed ${CFG_SRC_DIR}Makefile.in ./Makefile
-move_if_changed config.tmp config.mk
-rm -f config.tmp
-touch config.stamp
-
-step_msg "complete"
-msg
diff --git a/crates/cargo-test-macro/Cargo.toml b/crates/cargo-test-macro/Cargo.toml
new file mode 100644
index 00000000000..7f204278bb4
--- /dev/null
+++ b/crates/cargo-test-macro/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "cargo-test-macro"
+version = "0.1.0"
+authors = ["Jethro Beekman "]
+edition = "2018"
+license = "MIT OR Apache-2.0"
+homepage = "https://github.com/rust-lang/cargo"
+repository = "https://github.com/rust-lang/cargo"
+documentation = "https://github.com/rust-lang/cargo"
+description = "Helper proc-macro for Cargo's testsuite."
+
+[lib]
+proc-macro = true
diff --git a/crates/cargo-test-macro/src/lib.rs b/crates/cargo-test-macro/src/lib.rs
new file mode 100644
index 00000000000..678bb83c0cb
--- /dev/null
+++ b/crates/cargo-test-macro/src/lib.rs
@@ -0,0 +1,59 @@
+extern crate proc_macro;
+
+use proc_macro::*;
+
+#[proc_macro_attribute]
+pub fn cargo_test(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    let span = Span::call_site();
+    let mut ret = TokenStream::new();
+    ret.extend(Some(TokenTree::from(Punct::new('#', Spacing::Alone))));
+    let test = TokenTree::from(Ident::new("test", span));
+    ret.extend(Some(TokenTree::from(Group::new(
+        Delimiter::Bracket,
+        test.into(),
+    ))));
+
+    for token in item {
+        let group = match token {
+            TokenTree::Group(g) => {
+                if g.delimiter() == Delimiter::Brace {
+                    g
+                } else {
+                    ret.extend(Some(TokenTree::Group(g)));
+                    continue;
+                }
+            }
+            other => {
+                ret.extend(Some(other));
+                continue;
+            }
+        };
+
+        let mut new_body = vec![
+            TokenTree::from(Ident::new("let", span)),
+            TokenTree::from(Ident::new("_test_guard", span)),
+            TokenTree::from(Punct::new('=', Spacing::Alone)),
+            TokenTree::from(Ident::new("crate", span)),
+            TokenTree::from(Punct::new(':', Spacing::Joint)),
+            TokenTree::from(Punct::new(':', Spacing::Alone)),
+            TokenTree::from(Ident::new("support", span)),
+            TokenTree::from(Punct::new(':', Spacing::Joint)),
+            TokenTree::from(Punct::new(':', Spacing::Alone)),
+            TokenTree::from(Ident::new("paths", span)),
+            TokenTree::from(Punct::new(':', Spacing::Joint)),
+            TokenTree::from(Punct::new(':', Spacing::Alone)),
+            TokenTree::from(Ident::new("init_root", span)),
+            TokenTree::from(Group::new(Delimiter::Parenthesis, TokenStream::new())),
+            TokenTree::from(Punct::new(';', Spacing::Alone)),
+        ]
+        .into_iter()
+        .collect::<TokenStream>();
+        new_body.extend(group.stream());
+        ret.extend(Some(TokenTree::from(Group::new(
+            group.delimiter(),
+            new_body,
+        ))));
+    }
+
+    return ret;
+}
diff --git a/src/crates-io/Cargo.toml b/crates/crates-io/Cargo.toml
similarity index 51%
rename from src/crates-io/Cargo.toml
rename to crates/crates-io/Cargo.toml
index 70068ec5bf2..ec1e89dad4e 100644 --- a/src/crates-io/Cargo.toml +++ b/crates/crates-io/Cargo.toml @@ -1,8 +1,9 @@ [package] name = "crates-io" -version = "0.1.0" +version = "0.27.0" +edition = "2018" authors = ["Alex Crichton "] -license = "MIT/Apache-2.0" +license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/cargo" description = """ Helpers for interacting with crates.io @@ -13,5 +14,11 @@ name = "crates_io" path = "lib.rs" [dependencies] -curl = "0.2" -rustc-serialize = "0.3" +curl = "0.4" +failure = "0.1.1" +http = "0.1" +percent-encoding = "2.0" +serde = { version = "1.0", features = ['derive'] } +serde_derive = "1.0" +serde_json = "1.0" +url = "2.0" diff --git a/crates/crates-io/LICENSE-APACHE b/crates/crates-io/LICENSE-APACHE new file mode 120000 index 00000000000..1cd601d0a3a --- /dev/null +++ b/crates/crates-io/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/crates-io/LICENSE-MIT b/crates/crates-io/LICENSE-MIT new file mode 120000 index 00000000000..b2cfbdc7b0b --- /dev/null +++ b/crates/crates-io/LICENSE-MIT @@ -0,0 +1 @@ +../../LICENSE-MIT \ No newline at end of file diff --git a/crates/crates-io/lib.rs b/crates/crates-io/lib.rs new file mode 100644 index 00000000000..1f33d8d4330 --- /dev/null +++ b/crates/crates-io/lib.rs @@ -0,0 +1,373 @@ +#![allow(unknown_lints)] +#![allow(clippy::identity_op)] // used for vertical alignment + +use std::collections::BTreeMap; +use std::fs::File; +use std::io::prelude::*; +use std::io::Cursor; +use std::time::Instant; + +use curl::easy::{Easy, List}; +use failure::bail; +use http::status::StatusCode; +use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; +use serde::{Deserialize, Serialize}; +use serde_json; +use url::Url; + +pub type Result = std::result::Result; + +pub struct Registry { + /// The base URL for issuing API requests. + host: String, + /// Optional authorization token. + /// If None, commands requiring authorization will fail. + token: Option, + /// Curl handle for issuing requests. 
+ handle: Easy, +} + +#[derive(PartialEq, Clone, Copy)] +pub enum Auth { + Authorized, + Unauthorized, +} + +#[derive(Deserialize)] +pub struct Crate { + pub name: String, + pub description: Option, + pub max_version: String, +} + +#[derive(Serialize)] +pub struct NewCrate { + pub name: String, + pub vers: String, + pub deps: Vec, + pub features: BTreeMap>, + pub authors: Vec, + pub description: Option, + pub documentation: Option, + pub homepage: Option, + pub readme: Option, + pub readme_file: Option, + pub keywords: Vec, + pub categories: Vec, + pub license: Option, + pub license_file: Option, + pub repository: Option, + pub badges: BTreeMap>, + #[serde(default)] + pub links: Option, +} + +#[derive(Serialize)] +pub struct NewCrateDependency { + pub optional: bool, + pub default_features: bool, + pub name: String, + pub features: Vec, + pub version_req: String, + pub target: Option, + pub kind: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub registry: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub explicit_name_in_toml: Option, +} + +#[derive(Deserialize)] +pub struct User { + pub id: u32, + pub login: String, + pub avatar: Option, + pub email: Option, + pub name: Option, +} + +pub struct Warnings { + pub invalid_categories: Vec, + pub invalid_badges: Vec, + pub other: Vec, +} + +#[derive(Deserialize)] +struct R { + ok: bool, +} +#[derive(Deserialize)] +struct OwnerResponse { + ok: bool, + msg: String, +} +#[derive(Deserialize)] +struct ApiErrorList { + errors: Vec, +} +#[derive(Deserialize)] +struct ApiError { + detail: String, +} +#[derive(Serialize)] +struct OwnersReq<'a> { + users: &'a [&'a str], +} +#[derive(Deserialize)] +struct Users { + users: Vec, +} +#[derive(Deserialize)] +struct TotalCrates { + total: u32, +} +#[derive(Deserialize)] +struct Crates { + crates: Vec, + meta: TotalCrates, +} +impl Registry { + pub fn new(host: String, token: Option) -> Registry { + Registry::new_handle(host, token, Easy::new()) + } + + pub fn new_handle(host: String, token: Option, handle: Easy) -> Registry { + Registry { + host, + token, + handle, + } + } + + pub fn host(&self) -> &str { + &self.host + } + + pub fn host_is_crates_io(&self) -> bool { + Url::parse(self.host()) + .map(|u| u.host_str() == Some("crates.io")) + .unwrap_or(false) + } + + pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result { + let body = serde_json::to_string(&OwnersReq { users: owners })?; + let body = self.put(&format!("/crates/{}/owners", krate), body.as_bytes())?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(serde_json::from_str::(&body)?.msg) + } + + pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { + let body = serde_json::to_string(&OwnersReq { users: owners })?; + let body = self.delete(&format!("/crates/{}/owners", krate), Some(body.as_bytes()))?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(()) + } + + pub fn list_owners(&mut self, krate: &str) -> Result> { + let body = self.get(&format!("/crates/{}/owners", krate))?; + Ok(serde_json::from_str::(&body)?.users) + } + + pub fn publish(&mut self, krate: &NewCrate, tarball: &File) -> Result { + let json = serde_json::to_string(krate)?; + // Prepare the body. 
The format of the upload request is: + // + // + // (metadata for the package) + // + // + let stat = tarball.metadata()?; + let header = { + let mut w = Vec::new(); + w.extend( + [ + (json.len() >> 0) as u8, + (json.len() >> 8) as u8, + (json.len() >> 16) as u8, + (json.len() >> 24) as u8, + ] + .iter() + .cloned(), + ); + w.extend(json.as_bytes().iter().cloned()); + w.extend( + [ + (stat.len() >> 0) as u8, + (stat.len() >> 8) as u8, + (stat.len() >> 16) as u8, + (stat.len() >> 24) as u8, + ] + .iter() + .cloned(), + ); + w + }; + let size = stat.len() as usize + header.len(); + let mut body = Cursor::new(header).chain(tarball); + + let url = format!("{}/api/v1/crates/new", self.host); + + let token = match self.token.as_ref() { + Some(s) => s, + None => bail!("no upload token found, please run `cargo login`"), + }; + self.handle.put(true)?; + self.handle.url(&url)?; + self.handle.in_filesize(size as u64)?; + let mut headers = List::new(); + headers.append("Accept: application/json")?; + headers.append(&format!("Authorization: {}", token))?; + self.handle.http_headers(headers)?; + + let body = self.handle(&mut |buf| body.read(buf).unwrap_or(0))?; + + let response = if body.is_empty() { + "{}".parse()? + } else { + body.parse::()? + }; + + let invalid_categories: Vec = response + .get("warnings") + .and_then(|j| j.get("invalid_categories")) + .and_then(|j| j.as_array()) + .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) + .unwrap_or_else(Vec::new); + + let invalid_badges: Vec = response + .get("warnings") + .and_then(|j| j.get("invalid_badges")) + .and_then(|j| j.as_array()) + .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) + .unwrap_or_else(Vec::new); + + let other: Vec = response + .get("warnings") + .and_then(|j| j.get("other")) + .and_then(|j| j.as_array()) + .map(|x| x.iter().flat_map(|j| j.as_str()).map(Into::into).collect()) + .unwrap_or_else(Vec::new); + + Ok(Warnings { + invalid_categories, + invalid_badges, + other, + }) + } + + pub fn search(&mut self, query: &str, limit: u32) -> Result<(Vec, u32)> { + let formatted_query = percent_encode(query.as_bytes(), NON_ALPHANUMERIC); + let body = self.req( + &format!("/crates?q={}&per_page={}", formatted_query, limit), + None, + Auth::Unauthorized, + )?; + + let crates = serde_json::from_str::(&body)?; + Ok((crates.crates, crates.meta.total)) + } + + pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> { + let body = self.delete(&format!("/crates/{}/{}/yank", krate, version), None)?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(()) + } + + pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> { + let body = self.put(&format!("/crates/{}/{}/unyank", krate, version), &[])?; + assert!(serde_json::from_str::(&body)?.ok); + Ok(()) + } + + fn put(&mut self, path: &str, b: &[u8]) -> Result { + self.handle.put(true)?; + self.req(path, Some(b), Auth::Authorized) + } + + fn get(&mut self, path: &str) -> Result { + self.handle.get(true)?; + self.req(path, None, Auth::Authorized) + } + + fn delete(&mut self, path: &str, b: Option<&[u8]>) -> Result { + self.handle.custom_request("DELETE")?; + self.req(path, b, Auth::Authorized) + } + + fn req(&mut self, path: &str, body: Option<&[u8]>, authorized: Auth) -> Result { + self.handle.url(&format!("{}/api/v1{}", self.host, path))?; + let mut headers = List::new(); + headers.append("Accept: application/json")?; + headers.append("Content-Type: application/json")?; + + if authorized == Auth::Authorized { + let token = match 
self.token.as_ref() { + Some(s) => s, + None => bail!("no upload token found, please run `cargo login`"), + }; + headers.append(&format!("Authorization: {}", token))?; + } + self.handle.http_headers(headers)?; + match body { + Some(mut body) => { + self.handle.upload(true)?; + self.handle.in_filesize(body.len() as u64)?; + self.handle(&mut |buf| body.read(buf).unwrap_or(0)) + } + None => self.handle(&mut |_| 0), + } + } + + fn handle(&mut self, read: &mut dyn FnMut(&mut [u8]) -> usize) -> Result { + let mut headers = Vec::new(); + let mut body = Vec::new(); + let started; + { + let mut handle = self.handle.transfer(); + handle.read_function(|buf| Ok(read(buf)))?; + handle.write_function(|data| { + body.extend_from_slice(data); + Ok(data.len()) + })?; + handle.header_function(|data| { + headers.push(String::from_utf8_lossy(data).into_owned()); + true + })?; + started = Instant::now(); + handle.perform()?; + } + + let body = match String::from_utf8(body) { + Ok(body) => body, + Err(..) => bail!("response body was not valid utf-8"), + }; + let errors = serde_json::from_str::(&body) + .ok() + .map(|s| s.errors.into_iter().map(|s| s.detail).collect::>()); + + match (self.handle.response_code()?, errors) { + (0, None) | (200, None) => {} + (503, None) if started.elapsed().as_secs() >= 29 && self.host_is_crates_io() => bail!( + "Request timed out after 30 seconds. If you're trying to \ + upload a crate it may be too large. If the crate is under \ + 10MB in size, you can email help@crates.io for assistance." + ), + (code, Some(errors)) => { + let code = StatusCode::from_u16(code as _)?; + bail!("api errors (status {}): {}", code, errors.join(", ")) + } + (code, None) => bail!( + "failed to get a 200 OK response, got {}\n\ + headers:\n\ + \t{}\n\ + body:\n\ + {}", + code, + headers.join("\n\t"), + body, + ), + } + + Ok(body) + } +} diff --git a/crates/resolver-tests/Cargo.toml b/crates/resolver-tests/Cargo.toml new file mode 100644 index 00000000000..1d98c6973cc --- /dev/null +++ b/crates/resolver-tests/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "resolver-tests" +version = "0.1.0" +authors = ["Alex Crichton "] +edition = "2018" + +[dependencies] +cargo = { path = "../.." 
+proptest = "0.9.1"
+lazy_static = "1.3.0"
+varisat = "0.2.1"
+atty = "0.2.11"
diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs
new file mode 100644
index 00000000000..68cc96667ee
--- /dev/null
+++ b/crates/resolver-tests/src/lib.rs
@@ -0,0 +1,978 @@
+use std::cell::RefCell;
+use std::cmp::PartialEq;
+use std::cmp::{max, min};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::fmt;
+use std::fmt::Write;
+use std::rc::Rc;
+use std::time::Instant;
+
+use cargo::core::dependency::Kind;
+use cargo::core::resolver::{self, ResolveOpts};
+use cargo::core::source::{GitReference, SourceId};
+use cargo::core::Resolve;
+use cargo::core::{Dependency, PackageId, Registry, Summary};
+use cargo::util::{CargoResult, Config, Graph, IntoUrl, Platform};
+
+use proptest::collection::{btree_map, vec};
+use proptest::prelude::*;
+use proptest::sample::Index;
+use proptest::string::string_regex;
+use varisat::{self, ExtendFormula};
+
+pub fn resolve(deps: Vec<Dependency>, registry: &[Summary]) -> CargoResult<Vec<PackageId>> {
+    resolve_with_config(deps, registry, None)
+}
+
+pub fn resolve_and_validated(
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+    sat_resolve: Option<SatResolve>,
+) -> CargoResult<Vec<PackageId>> {
+    let resolve = resolve_with_config_raw(deps.clone(), registry, None);
+
+    match resolve {
+        Err(e) => {
+            let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+            if sat_resolve.sat_resolve(&deps) {
+                panic!(
+                    "the resolve err but the sat_resolve thinks this will work:\n{}",
+                    sat_resolve.use_packages().unwrap()
+                );
+            }
+            Err(e)
+        }
+        Ok(resolve) => {
+            let mut stack = vec![pkg_id("root")];
+            let mut used = HashSet::new();
+            let mut links = HashSet::new();
+            while let Some(p) = stack.pop() {
+                assert!(resolve.contains(&p));
+                if used.insert(p) {
+                    // in the tests all `links` crates end in `-sys`
+                    if p.name().ends_with("-sys") {
+                        assert!(links.insert(p.name()));
+                    }
+                    stack.extend(resolve.deps(p).map(|(dp, deps)| {
+                        for d in deps {
+                            assert!(d.matches_id(dp));
+                        }
+                        dp
+                    }));
+                }
+            }
+            let out = resolve.sort();
+            assert_eq!(out.len(), used.len());
+
+            let mut pub_deps: HashMap<PackageId, HashSet<PackageId>> = HashMap::new();
+            for &p in out.iter() {
+                // make the list of `p`'s public dependencies
+                let mut self_pub_dep = HashSet::new();
+                self_pub_dep.insert(p);
+                for (dp, deps) in resolve.deps(p) {
+                    if deps.iter().any(|d| d.is_public()) {
+                        self_pub_dep.extend(pub_deps[&dp].iter().cloned())
+                    }
+                }
+                pub_deps.insert(p, self_pub_dep);
+
+                // check if `p` has any public-dependency conflicts
+                let seen_dep: BTreeSet<_> = resolve
+                    .deps(p)
+                    .flat_map(|(dp, _)| pub_deps[&dp].iter().cloned())
+                    .collect();
+                let seen_dep: Vec<_> = seen_dep.iter().collect();
+                for a in seen_dep.windows(2) {
+                    if a[0].name() == a[1].name() {
+                        panic!(
+                            "the package {:?} can publicly see {:?} and {:?}",
+                            p, a[0], a[1]
+                        )
+                    }
+                }
+            }
+            let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry));
+            if !sat_resolve.sat_is_valid_solution(&out) {
+                panic!(
+                    "the sat_resolve err but the resolve thinks this will work:\n{:?}",
+                    resolve
+                );
+            }
+            Ok(out)
+        }
+    }
+}
+
+pub fn resolve_with_config(
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+    config: Option<&Config>,
+) -> CargoResult<Vec<PackageId>> {
+    let resolve = resolve_with_config_raw(deps, registry, config)?;
+    Ok(resolve.sort())
+}
+
+pub fn resolve_with_config_raw(
+    deps: Vec<Dependency>,
+    registry: &[Summary],
+    config: Option<&Config>,
+) -> CargoResult<Resolve> {
+    struct MyRegistry<'a> {
+        list: &'a [Summary],
+        used: HashSet<PackageId>,
+    };
+    impl<'a> Registry for MyRegistry<'a> {
+        fn query(
+            &mut self,
+            dep: &Dependency,
+            f: &mut dyn FnMut(Summary),
+            fuzzy: bool,
+        ) -> CargoResult<()> {
+            for summary in self.list.iter() {
+                if fuzzy || dep.matches(summary) {
+                    self.used.insert(summary.package_id());
+                    f(summary.clone());
+                }
+            }
+            Ok(())
+        }
+
+        fn describe_source(&self, _src: SourceId) -> String {
+            String::new()
+        }
+
+        fn is_replaced(&self, _src: SourceId) -> bool {
+            false
+        }
+    }
+    impl<'a> Drop for MyRegistry<'a> {
+        fn drop(&mut self) {
+            if std::thread::panicking() && self.list.len() != self.used.len() {
+                // we found a case that causes a panic and did not use all of the input.
+                // let's print the part of the input that was used, for minimization.
+                println!(
+                    "{:?}",
+                    PrettyPrintRegistry(
+                        self.list
+                            .iter()
+                            .filter(|s| { self.used.contains(&s.package_id()) })
+                            .cloned()
+                            .collect()
+                    )
+                );
+            }
+        }
+    }
+    let mut registry = MyRegistry {
+        list: registry,
+        used: HashSet::new(),
+    };
+    let summary = Summary::new(
+        pkg_id("root"),
+        deps,
+        &BTreeMap::<String, Vec<String>>::new(),
+        None::<String>,
+        false,
+    )
+    .unwrap();
+    let opts = ResolveOpts::everything();
+    let start = Instant::now();
+    let resolve = resolver::resolve(
+        &[(summary, opts)],
+        &[],
+        &mut registry,
+        &HashSet::new(),
+        config,
+        true,
+    );
+
+    // The largest test in our suite takes less than 30 sec.
+    // So let's fail the test if we have been running for too long.
+    assert!(start.elapsed().as_secs() < 60);
+    resolve
+}
+
+const fn num_bits<T>() -> usize {
+    std::mem::size_of::<T>() * 8
+}
+
+fn log_bits(x: usize) -> usize {
+    if x == 0 {
+        return 0;
+    }
+    assert!(x > 0);
+    (num_bits::<usize>() as u32 - x.leading_zeros()) as usize
+}
+
+fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) {
+    if vars.len() <= 1 {
+        return;
+    } else if vars.len() == 2 {
+        solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+        return;
+    } else if vars.len() == 3 {
+        solver.add_clause(&[vars[0].negative(), vars[1].negative()]);
+        solver.add_clause(&[vars[0].negative(), vars[2].negative()]);
+        solver.add_clause(&[vars[1].negative(), vars[2].negative()]);
+        return;
+    }
+    // use the "Binary Encoding" from
+    // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf
+    let bits: Vec<varisat::Var> = solver.new_var_iter(log_bits(vars.len())).collect();
+    for (i, p) in vars.iter().enumerate() {
+        for b in 0..bits.len() {
+            solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]);
+        }
+    }
+}
+
+fn sat_at_most_one_by_key<K: std::hash::Hash + Eq>(
+    cnf: &mut impl varisat::ExtendFormula,
+    data: impl Iterator<Item = (K, varisat::Var)>,
+) -> HashMap<K, Vec<varisat::Var>> {
+    // no two packages with the same links set
+    let mut by_keys: HashMap<K, Vec<varisat::Var>> = HashMap::new();
+    for (p, v) in data {
+        by_keys.entry(p).or_default().push(v)
+    }
+    for key in by_keys.values() {
+        sat_at_most_one(cnf, key);
+    }
+    by_keys
+}
+
+/// Resolution can be reduced to the SAT problem. So this is an alternative implementation
+/// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read,
+/// as compared to the real resolver.
+///
+/// For the subset of functionality that is currently exercised by `registry_strategy`, this will
+/// find a valid resolution if one exists. The big thing that the real resolver does,
+/// but this one does not, is work with features and optional dependencies.
+///
+/// The SAT library does not optimize for the newer version,
+/// so the selected packages may not match the real resolver.
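+///
+/// A minimal usage sketch, using the helpers defined later in this file
+/// (illustrative only, not a doc-test):
+///
+/// ```ignore
+/// let reg = registry(vec![pkg!("a"), pkg!("b" => ["a"])]);
+/// let sat = SatResolve::new(&reg);
+/// assert!(sat.sat_resolve(&[dep("b")]));
+/// ```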
+#[derive(Clone)]
+pub struct SatResolve(Rc<RefCell<SatResolveInner>>);
+struct SatResolveInner {
+    solver: varisat::Solver<'static>,
+    var_for_is_packages_used: HashMap<PackageId, varisat::Var>,
+    by_name: HashMap<&'static str, Vec<PackageId>>,
+}
+
+impl SatResolve {
+    pub fn new(registry: &[Summary]) -> Self {
+        let mut cnf = varisat::CnfFormula::new();
+        let var_for_is_packages_used: HashMap<PackageId, varisat::Var> = registry
+            .iter()
+            .map(|s| (s.package_id(), cnf.new_var()))
+            .collect();
+
+        // no two packages with the same links set
+        sat_at_most_one_by_key(
+            &mut cnf,
+            registry
+                .iter()
+                .map(|s| (s.links(), var_for_is_packages_used[&s.package_id()]))
+                .filter(|(l, _)| l.is_some()),
+        );
+
+        // no two semver compatible versions of the same package
+        let by_activations_keys = sat_at_most_one_by_key(
+            &mut cnf,
+            var_for_is_packages_used
+                .iter()
+                .map(|(p, &v)| (p.as_activations_key(), v)),
+        );
+
+        let mut by_name: HashMap<&'static str, Vec<PackageId>> = HashMap::new();
+
+        for p in registry.iter() {
+            by_name
+                .entry(p.name().as_str())
+                .or_default()
+                .push(p.package_id())
+        }
+
+        let empty_vec = vec![];
+
+        let mut graph: Graph<PackageId, ()> = Graph::new();
+
+        let mut version_selected_for: HashMap<
+            PackageId,
+            HashMap<Dependency, HashMap<_, varisat::Var>>,
+        > = HashMap::new();
+        // active packages need each of their `deps` to be satisfied
+        for p in registry.iter() {
+            graph.add(p.package_id());
+            for dep in p.dependencies() {
+                // This can more easily be written as:
+                // !is_active(p) or one of the things that match dep is_active
+                // All the complexity, from here to the end, is to support public and private dependencies!
+                let mut by_key: HashMap<_, Vec<varisat::Lit>> = HashMap::new();
+                for &m in by_name
+                    .get(dep.package_name().as_str())
+                    .unwrap_or(&empty_vec)
+                    .iter()
+                    .filter(|&p| dep.matches_id(*p))
+                {
+                    graph.link(p.package_id(), m);
+                    by_key
+                        .entry(m.as_activations_key())
+                        .or_default()
+                        .push(var_for_is_packages_used[&m].positive());
+                }
+                let keys: HashMap<_, _> = by_key.keys().map(|&k| (k, cnf.new_var())).collect();
+
+                // if `p` is active then we need to select one of the keys
+                let matches: Vec<_> = keys
+                    .values()
+                    .map(|v| v.positive())
+                    .chain(Some(var_for_is_packages_used[&p.package_id()].negative()))
+                    .collect();
+                cnf.add_clause(&matches);
+
+                // if a key is active then we need to select one of the versions
+                for (key, vars) in by_key.iter() {
+                    let mut matches = vars.clone();
+                    matches.push(keys[key].negative());
+                    cnf.add_clause(&matches);
+                }
+
+                version_selected_for
+                    .entry(p.package_id())
+                    .or_default()
+                    .insert(dep.clone(), keys);
+            }
+        }
+
+        let topological_order = graph.sort();
+
+        // we already ensure there is only one version for each `activations_key` so we can think of
+        // `publicly_exports` as being in terms of a set of `activations_key`s
+        let mut publicly_exports: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new();
+
+        for &key in by_activations_keys.keys() {
+            // everything publicly depends on itself
+            let var = publicly_exports
+                .entry(key)
+                .or_default()
+                .entry(key)
+                .or_insert_with(|| cnf.new_var());
+            cnf.add_clause(&[var.positive()]);
+        }
+
+        // if a `dep` is public then `p` `publicly_exports` all the things that the selected version `publicly_exports`
+        for &p in topological_order.iter() {
+            if let Some(deps) = version_selected_for.get(&p) {
+                let mut p_exports = publicly_exports.remove(&p.as_activations_key()).unwrap();
+                for (_, versions) in deps.iter().filter(|(d, _)| d.is_public()) {
+                    for (ver, sel) in versions {
+                        for (&export_pid, &export_var) in publicly_exports[ver].iter() {
+                            let our_var =
+                                p_exports.entry(export_pid).or_insert_with(|| 
cnf.new_var()); + cnf.add_clause(&[ + sel.negative(), + export_var.negative(), + our_var.positive(), + ]); + } + } + } + publicly_exports.insert(p.as_activations_key(), p_exports); + } + } + + // we already ensure there is only one version for each `activations_key` so we can think of + // `can_see` as being in terms of a set of `activations_key`s + // and if `p` `publicly_exports` `export` then it `can_see` `export` + let mut can_see: HashMap<_, HashMap<_, varisat::Var>> = HashMap::new(); + + // if `p` has a `dep` that selected `ver` then it `can_see` all the things that the selected version `publicly_exports` + for (&p, deps) in version_selected_for.iter() { + let p_can_see = can_see.entry(p).or_default(); + for (_, versions) in deps.iter() { + for (&ver, sel) in versions { + for (&export_pid, &export_var) in publicly_exports[&ver].iter() { + let our_var = p_can_see.entry(export_pid).or_insert_with(|| cnf.new_var()); + cnf.add_clause(&[ + sel.negative(), + export_var.negative(), + our_var.positive(), + ]); + } + } + } + } + + // a package `can_see` only one version by each name + for (_, see) in can_see.iter() { + sat_at_most_one_by_key(&mut cnf, see.iter().map(|((name, _, _), &v)| (name, v))); + } + let mut solver = varisat::Solver::new(); + solver.add_formula(&cnf); + + // We dont need to `solve` now. We know that "use nothing" will satisfy all the clauses so far. + // But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions. + solver + .solve() + .expect("docs say it can't error in default config"); + SatResolve(Rc::new(RefCell::new(SatResolveInner { + solver, + var_for_is_packages_used, + by_name, + }))) + } + pub fn sat_resolve(&self, deps: &[Dependency]) -> bool { + let mut s = self.0.borrow_mut(); + let mut assumption = vec![]; + let mut this_call = None; + + // the starting `deps` need to be satisfied + for dep in deps.iter() { + let empty_vec = vec![]; + let matches: Vec = s + .by_name + .get(dep.package_name().as_str()) + .unwrap_or(&empty_vec) + .iter() + .filter(|&p| dep.matches_id(*p)) + .map(|p| s.var_for_is_packages_used[p].positive()) + .collect(); + if matches.is_empty() { + return false; + } else if matches.len() == 1 { + assumption.extend_from_slice(&matches) + } else { + if this_call.is_none() { + let new_var = s.solver.new_var(); + this_call = Some(new_var); + assumption.push(new_var.positive()); + } + let mut matches = matches; + matches.push(this_call.unwrap().negative()); + s.solver.add_clause(&matches); + } + } + + s.solver.assume(&assumption); + + s.solver + .solve() + .expect("docs say it can't error in default config") + } + pub fn sat_is_valid_solution(&self, pids: &[PackageId]) -> bool { + let mut s = self.0.borrow_mut(); + for p in pids { + if p.name().as_str() != "root" && !s.var_for_is_packages_used.contains_key(p) { + return false; + } + } + let assumption: Vec<_> = s + .var_for_is_packages_used + .iter() + .map(|(p, v)| v.lit(pids.contains(p))) + .collect(); + + s.solver.assume(&assumption); + + s.solver + .solve() + .expect("docs say it can't error in default config") + } + fn use_packages(&self) -> Option { + self.0.borrow().solver.model().map(|lits| { + let lits: HashSet<_> = lits + .iter() + .filter(|l| l.is_positive()) + .map(|l| l.var()) + .collect(); + let mut out = String::new(); + out.push_str("used:\n"); + for (p, v) in self.0.borrow().var_for_is_packages_used.iter() { + if lits.contains(v) { + writeln!(&mut out, " {}", p).unwrap(); + } + } + out + }) + } +} + +pub trait ToDep { + 
fn to_dep(self) -> Dependency;
+}
+
+impl ToDep for &'static str {
+    fn to_dep(self) -> Dependency {
+        Dependency::parse_no_deprecated(self, Some("1.0.0"), registry_loc()).unwrap()
+    }
+}
+
+impl ToDep for Dependency {
+    fn to_dep(self) -> Dependency {
+        self
+    }
+}
+
+pub trait ToPkgId {
+    fn to_pkgid(&self) -> PackageId;
+}
+
+impl ToPkgId for PackageId {
+    fn to_pkgid(&self) -> PackageId {
+        *self
+    }
+}
+
+impl<'a> ToPkgId for &'a str {
+    fn to_pkgid(&self) -> PackageId {
+        PackageId::new(*self, "1.0.0", registry_loc()).unwrap()
+    }
+}
+
+impl<T: AsRef<str>, U: AsRef<str>> ToPkgId for (T, U) {
+    fn to_pkgid(&self) -> PackageId {
+        let (name, vers) = self;
+        PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap()
+    }
+}
+
+#[macro_export]
+macro_rules! pkg {
+    ($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({
+        let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
+        $crate::pkg_dep($pkgid, d)
+    });
+
+    ($pkgid:expr) => ({
+        $crate::pkg($pkgid)
+    })
+}
+
+fn registry_loc() -> SourceId {
+    lazy_static::lazy_static! {
+        static ref EXAMPLE_DOT_COM: SourceId =
+            SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap();
+    }
+    *EXAMPLE_DOT_COM
+}
+
+pub fn pkg<T: ToPkgId>(name: T) -> Summary {
+    pkg_dep(name, Vec::new())
+}
+
+pub fn pkg_dep<T: ToPkgId>(name: T, dep: Vec<Dependency>) -> Summary {
+    let pkgid = name.to_pkgid();
+    let link = if pkgid.name().ends_with("-sys") {
+        Some(pkgid.name().as_str())
+    } else {
+        None
+    };
+    Summary::new(
+        name.to_pkgid(),
+        dep,
+        &BTreeMap::<String, Vec<String>>::new(),
+        link,
+        false,
+    )
+    .unwrap()
+}
+
+pub fn pkg_id(name: &str) -> PackageId {
+    PackageId::new(name, "1.0.0", registry_loc()).unwrap()
+}
+
+fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
+    let remote = loc.into_url();
+    let master = GitReference::Branch("master".to_string());
+    let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();
+
+    PackageId::new(name, "1.0.0", source_id).unwrap()
+}
+
+pub fn pkg_loc(name: &str, loc: &str) -> Summary {
+    let link = if name.ends_with("-sys") {
+        Some(name)
+    } else {
+        None
+    };
+    Summary::new(
+        pkg_id_loc(name, loc),
+        Vec::new(),
+        &BTreeMap::<String, Vec<String>>::new(),
+        link,
+        false,
+    )
+    .unwrap()
+}
+
+pub fn remove_dep(sum: &Summary, ind: usize) -> Summary {
+    let mut deps = sum.dependencies().to_vec();
+    deps.remove(ind);
+    // note: more things will need to be copied over in the future, but it works for now.
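+    // (Illustrative: `remove_dep(&pkg!("a" => ["b", "c"]), 0)` yields an `a`
+    // that depends only on `c`.)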
+ Summary::new( + sum.package_id(), + deps, + &BTreeMap::, Vec)>::new(), + sum.links().map(|a| a.as_str()), + sum.namespaced_features(), + ) + .unwrap() +} + +pub fn dep(name: &str) -> Dependency { + dep_req(name, "*") +} +pub fn dep_req(name: &str, req: &str) -> Dependency { + Dependency::parse_no_deprecated(name, Some(req), registry_loc()).unwrap() +} +pub fn dep_req_kind(name: &str, req: &str, kind: Kind, public: bool) -> Dependency { + let mut dep = dep_req(name, req); + dep.set_kind(kind); + dep.set_public(public); + dep +} + +pub fn dep_loc(name: &str, location: &str) -> Dependency { + let url = location.into_url().unwrap(); + let master = GitReference::Branch("master".to_string()); + let source_id = SourceId::for_git(&url, master).unwrap(); + Dependency::parse_no_deprecated(name, Some("1.0.0"), source_id).unwrap() +} +pub fn dep_kind(name: &str, kind: Kind) -> Dependency { + dep(name).set_kind(kind).clone() +} + +pub fn registry(pkgs: Vec) -> Vec { + pkgs +} + +pub fn names(names: &[P]) -> Vec { + names.iter().map(|name| name.to_pkgid()).collect() +} + +pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { + names + .iter() + .map(|&(name, loc)| pkg_id_loc(name, loc)) + .collect() +} + +/// By default `Summary` and `Dependency` have a very verbose `Debug` representation. +/// This replaces with a representation that uses constructors from this file. +/// +/// If `registry_strategy` is improved to modify more fields +/// then this needs to update to display the corresponding constructor. +pub struct PrettyPrintRegistry(pub Vec); + +impl fmt::Debug for PrettyPrintRegistry { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "vec![")?; + for s in &self.0 { + if s.dependencies().is_empty() { + write!(f, "pkg!((\"{}\", \"{}\")),", s.name(), s.version())?; + } else { + write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?; + for d in s.dependencies() { + if d.kind() == Kind::Normal + && &d.version_req().to_string() == "*" + && !d.is_public() + { + write!(f, "dep(\"{}\"),", d.name_in_toml())?; + } else if d.kind() == Kind::Normal && !d.is_public() { + write!( + f, + "dep_req(\"{}\", \"{}\"),", + d.name_in_toml(), + d.version_req() + )?; + } else { + write!( + f, + "dep_req_kind(\"{}\", \"{}\", {}, {}),", + d.name_in_toml(), + d.version_req(), + match d.kind() { + Kind::Development => "Kind::Development", + Kind::Build => "Kind::Build", + Kind::Normal => "Kind::Normal", + }, + d.is_public() + )?; + } + } + write!(f, "]),")?; + } + } + write!(f, "]") + } +} + +#[test] +fn meta_test_deep_pretty_print_registry() { + assert_eq!( + &format!( + "{:?}", + PrettyPrintRegistry(vec![ + pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), + pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), + pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]), + pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), + dep_req("other", "1")]), + pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), + pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), + pkg!(("baz", "1.0.1")), + pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", Kind::Build, false)]), + pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", Kind::Development, false)]), + pkg!(("dep_req", "1.0.0")), + pkg!(("dep_req", "2.0.0")), + ]) + ), + "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\ + pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\ + pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\ + pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"= 1.0.2\"),dep_req(\"other\", \"^1\"),]),\ + 
pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"= 1.0.1\"),]),\ + pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\ + pkg!((\"baz\", \"1.0.1\")),\ + pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", Kind::Build, false),]),\ + pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", Kind::Development, false),]),\ + pkg!((\"dep_req\", \"1.0.0\")),\ + pkg!((\"dep_req\", \"2.0.0\")),]" + ) +} + +/// This generates a random registry index. +/// Unlike vec((Name, Ver, vec((Name, VerRq), ..), ..) +/// This strategy has a high probability of having valid dependencies +pub fn registry_strategy( + max_crates: usize, + max_versions: usize, + shrinkage: usize, +) -> impl Strategy { + let name = string_regex("[A-Za-z][A-Za-z0-9_-]*(-sys)?").unwrap(); + + let raw_version = ..max_versions.pow(3); + let version_from_raw = move |r: usize| { + let major = ((r / max_versions) / max_versions) % max_versions; + let minor = (r / max_versions) % max_versions; + let patch = r % max_versions; + format!("{}.{}.{}", major, minor, patch) + }; + + // If this is false than the crate will depend on the nonexistent "bad" + // instead of the complex set we generated for it. + let allow_deps = prop::bool::weighted(0.99); + + let list_of_versions = + btree_map(raw_version, allow_deps, 1..=max_versions).prop_map(move |ver| { + ver.into_iter() + .map(|a| (version_from_raw(a.0), a.1)) + .collect::>() + }); + + let list_of_crates_with_versions = + btree_map(name, list_of_versions, 1..=max_crates).prop_map(|mut vers| { + // root is the name of the thing being compiled + // so it would be confusing to have it in the index + vers.remove("root"); + // bad is a name reserved for a dep that won't work + vers.remove("bad"); + vers + }); + + // each version of each crate can depend on each crate smaller then it. + // In theory shrinkage should be 2, but in practice we get better trees with a larger value. + let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage; + + let raw_version_range = (any::(), any::()); + let raw_dependency = ( + any::(), + any::(), + raw_version_range, + 0..=1, + Just(false), + // TODO: ^ this needs to be set back to `any::()` and work before public & private dependencies can stabilize + ); + + fn order_index(a: Index, b: Index, size: usize) -> (usize, usize) { + let (a, b) = (a.index(size), b.index(size)); + (min(a, b), max(a, b)) + } + + let list_of_raw_dependency = vec(raw_dependency, ..=max_deps); + + // By default a package depends only on other packages that have a smaller name, + // this helps make sure that all things in the resulting index are DAGs. + // If this is true then the DAG is maintained with grater instead. 
+ let reverse_alphabetical = any::().no_shrink(); + + ( + list_of_crates_with_versions, + list_of_raw_dependency, + reverse_alphabetical, + ) + .prop_map( + |(crate_vers_by_name, raw_dependencies, reverse_alphabetical)| { + let list_of_pkgid: Vec<_> = crate_vers_by_name + .iter() + .flat_map(|(name, vers)| vers.iter().map(move |x| ((name.as_str(), &x.0), x.1))) + .collect(); + let len_all_pkgid = list_of_pkgid.len(); + let mut dependency_by_pkgid = vec![vec![]; len_all_pkgid]; + for (a, b, (c, d), k, p) in raw_dependencies { + let (a, b) = order_index(a, b, len_all_pkgid); + let (a, b) = if reverse_alphabetical { (b, a) } else { (a, b) }; + let ((dep_name, _), _) = list_of_pkgid[a]; + if (list_of_pkgid[b].0).0 == dep_name { + continue; + } + let s = &crate_vers_by_name[dep_name]; + let s_last_index = s.len() - 1; + let (c, d) = order_index(c, d, s.len()); + + dependency_by_pkgid[b].push(dep_req_kind( + dep_name, + &if c == 0 && d == s_last_index { + "*".to_string() + } else if c == 0 { + format!("<={}", s[d].0) + } else if d == s_last_index { + format!(">={}", s[c].0) + } else if c == d { + format!("={}", s[c].0) + } else { + format!(">={}, <={}", s[c].0, s[d].0) + }, + match k { + 0 => Kind::Normal, + 1 => Kind::Build, + // => Kind::Development, // Development has no impact so don't gen + _ => panic!("bad index for Kind"), + }, + p && k == 0, + )) + } + + let mut out: Vec = list_of_pkgid + .into_iter() + .zip(dependency_by_pkgid.into_iter()) + .map(|(((name, ver), allow_deps), deps)| { + pkg_dep( + (name, ver).to_pkgid(), + if !allow_deps { + vec![dep_req("bad", "*")] + } else { + let mut deps = deps; + deps.sort_by_key(|d| d.name_in_toml()); + deps.dedup_by_key(|d| d.name_in_toml()); + deps + }, + ) + }) + .collect(); + + if reverse_alphabetical { + // make sure the complicated cases are at the end + out.reverse(); + } + + PrettyPrintRegistry(out) + }, + ) +} + +/// This test is to test the generator to ensure +/// that it makes registries with large dependency trees +#[test] +fn meta_test_deep_trees_from_strategy() { + use proptest::strategy::ValueTree; + use proptest::test_runner::TestRunner; + + let mut dis = [0; 21]; + + let strategy = registry_strategy(50, 20, 60); + let mut test_runner = TestRunner::deterministic(); + for _ in 0..128 { + let PrettyPrintRegistry(input) = strategy + .new_tree(&mut TestRunner::new_with_rng( + Default::default(), + test_runner.new_rng(), + )) + .unwrap() + .current(); + let reg = registry(input.clone()); + for this in input.iter().rev().take(10) { + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ); + dis[res + .as_ref() + .map(|x| min(x.len(), dis.len()) - 1) + .unwrap_or(0)] += 1; + if dis.iter().all(|&x| x > 0) { + return; + } + } + } + + panic!( + "In 1280 tries we did not see a wide enough distribution of dependency trees! 
dis: {:?}", + dis + ); +} + +/// This test is to test the generator to ensure +/// that it makes registries that include multiple versions of the same library +#[test] +fn meta_test_multiple_versions_strategy() { + use proptest::strategy::ValueTree; + use proptest::test_runner::TestRunner; + + let mut dis = [0; 10]; + + let strategy = registry_strategy(50, 20, 60); + let mut test_runner = TestRunner::deterministic(); + for _ in 0..128 { + let PrettyPrintRegistry(input) = strategy + .new_tree(&mut TestRunner::new_with_rng( + Default::default(), + test_runner.new_rng(), + )) + .unwrap() + .current(); + let reg = registry(input.clone()); + for this in input.iter().rev().take(10) { + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ); + if let Ok(mut res) = res { + let res_len = res.len(); + res.sort_by_key(|s| s.name()); + res.dedup_by_key(|s| s.name()); + dis[min(res_len - res.len(), dis.len() - 1)] += 1; + } + if dis.iter().all(|&x| x > 0) { + return; + } + } + } + panic!( + "In 1280 tries we did not see a wide enough distribution of multiple versions of the same library! dis: {:?}", + dis + ); +} + +/// Assert `xs` contains `elems` +pub fn assert_contains(xs: &[A], elems: &[A]) { + for elem in elems { + assert!(xs.contains(elem)); + } +} + +pub fn assert_same(a: &[A], b: &[A]) { + assert_eq!(a.len(), b.len()); + assert_contains(b, a); +} \ No newline at end of file diff --git a/crates/resolver-tests/tests/resolve.rs b/crates/resolver-tests/tests/resolve.rs new file mode 100644 index 00000000000..0c6512186d7 --- /dev/null +++ b/crates/resolver-tests/tests/resolve.rs @@ -0,0 +1,1420 @@ +use std::env; + +use cargo::core::dependency::Kind; +use cargo::core::{enable_nightly_features, Dependency}; +use cargo::util::{is_ci, Config}; + +use resolver_tests::{ + assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, dep_req_kind, loc_names, names, + pkg, pkg_id, pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated, + resolve_with_config, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId, +}; + +use proptest::prelude::*; + +// NOTE: proptest is a form of fuzz testing. It generates random input and makes sure that +// certain universal truths are upheld. Therefore, it can pass when there is a problem, +// but if it fails then there really is something wrong. When testing something as +// complicated as the resolver, the problems can be very subtle and hard to generate. +// We have had a history of these tests only failing on PRs long after a bug is introduced. +// If you have one of these test fail please report it on #6258, +// and if you did not change the resolver then feel free to retry without concern. +proptest! { + #![proptest_config(ProptestConfig { + max_shrink_iters: + if is_ci() || !atty::is(atty::Stream::Stderr) { + // This attempts to make sure that CI will fail fast, + 0 + } else { + // but that local builds will give a small clear test case. + std::u32::MAX + }, + result_cache: prop::test_runner::basic_result_cache, + .. ProptestConfig::default() + })] + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_passes_validation( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) + ) { + let reg = registry(input.clone()); + let sat_resolve = SatResolve::new(®); + // there is only a small chance that any one + // crate will be interesting. + // So we try some of the most complicated. 
+ for this in input.iter().rev().take(20) { + let _ = resolve_and_validated( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + Some(sat_resolve.clone()), + ); + } + } + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_minimum_version_errors_the_same( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) + ) { + enable_nightly_features(); + + let mut config = Config::default().unwrap(); + config + .configure( + 1, + None, + &None, + false, + false, + false, + &None, + &["minimal-versions".to_string()], + ) + .unwrap(); + + let reg = registry(input.clone()); + // there is only a small chance that any one + // crate will be interesting. + // So we try some of the most complicated. + for this in input.iter().rev().take(10) { + // minimal-versions change what order the candidates + // are tried but not the existence of a solution + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ); + + let mres = resolve_with_config( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + Some(&config), + ); + + prop_assert_eq!( + res.is_ok(), + mres.is_ok(), + "minimal-versions and regular resolver disagree about weather `{} = \"={}\"` can resolve", + this.name(), + this.version() + ) + } + } + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_removing_a_dep_cant_break( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), + indexes_to_remove in prop::collection::vec((any::(), any::()), ..10) + ) { + let reg = registry(input.clone()); + let mut removed_input = input.clone(); + for (summary_idx, dep_idx) in indexes_to_remove { + if !removed_input.is_empty() { + let summary_idx = summary_idx.index(removed_input.len()); + let deps = removed_input[summary_idx].dependencies(); + if !deps.is_empty() { + let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len())); + removed_input[summary_idx] = new; + } + } + } + let removed_reg = registry(removed_input); + // there is only a small chance that any one + // crate will be interesting. + // So we try some of the most complicated. + for this in input.iter().rev().take(10) { + if resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ).is_ok() { + prop_assert!( + resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + &removed_reg, + ).is_ok(), + "full index worked for `{} = \"={}\"` but removing some deps broke it!", + this.name(), + this.version(), + ) + } + } + } + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_limited_independence_of_irrelevant_alternatives( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), + indexes_to_unpublish in prop::collection::vec(any::(), ..10) + ) { + let reg = registry(input.clone()); + // there is only a small chance that any one + // crate will be interesting. + // So we try some of the most complicated. + for this in input.iter().rev().take(10) { + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ); + + match res { + Ok(r) => { + // If resolution was successful, then unpublishing a version of a crate + // that was not selected should not change that. 
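+                    // (`proptest::sample::Index::get` selects an element out of
+                    // whatever slice it is handed, so the same random indexes can
+                    // be reused against the filtered `not_selected` list below.)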
+                    let not_selected: Vec<_> = input
+                        .iter()
+                        .cloned()
+                        .filter(|x| !r.contains(&x.package_id()))
+                        .collect();
+                    if !not_selected.is_empty() {
+                        let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&not_selected)).collect();
+
+                        let new_reg = registry(
+                            input
+                                .iter()
+                                .cloned()
+                                .filter(|x| !indexes_to_unpublish.contains(&x))
+                                .collect(),
+                        );
+
+                        let res = resolve(
+                            vec![dep_req(&this.name(), &format!("={}", this.version()))],
+                            &new_reg,
+                        );
+
+                        // Note that we cannot assert that the two `res` are identical,
+                        // as the resolver does depend on irrelevant alternatives.
+                        // It uses how constrained a dependency requirement is
+                        // to determine what order to evaluate requirements.
+
+                        prop_assert!(
+                            res.is_ok(),
+                            "unpublishing {:?} stopped `{} = \"={}\"` from working",
+                            indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+                            this.name(),
+                            this.version()
+                        )
+                    }
+                }
+
+                Err(_) => {
+                    // If resolution was unsuccessful, then it should stay unsuccessful
+                    // even if any version of a crate is unpublished.
+                    let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&input)).collect();
+
+                    let new_reg = registry(
+                        input
+                            .iter()
+                            .cloned()
+                            .filter(|x| !indexes_to_unpublish.contains(&x))
+                            .collect(),
+                    );
+
+                    let res = resolve(
+                        vec![dep_req(&this.name(), &format!("={}", this.version()))],
+                        &new_reg,
+                    );
+
+                    prop_assert!(
+                        res.is_err(),
+                        "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!",
+                        this.name(),
+                        this.version(),
+                        indexes_to_unpublish.iter().map(|x| x.package_id()).collect::<Vec<_>>(),
+                    )
+                }
+            }
+        }
+    }
+}
+
+#[test]
+fn pub_fail() {
+    let input = vec![
+        pkg!(("a", "0.0.4")),
+        pkg!(("a", "0.0.5")),
+        pkg!(("e", "0.0.6") => [dep_req_kind("a", "<= 0.0.4", Kind::Normal, true),]),
+        pkg!(("kB", "0.0.3") => [dep_req("a", ">= 0.0.5"),dep("e"),]),
+    ];
+    let reg = registry(input.clone());
+    assert!(resolve_and_validated(vec![dep("kB")], &reg, None).is_err());
+}
+
+#[test]
+fn basic_public_dependency() {
+    let reg = registry(vec![
+        pkg!(("A", "0.1.0")),
+        pkg!(("A", "0.2.0")),
+        pkg!("B" => [dep_req_kind("A", "0.1", Kind::Normal, true)]),
+        pkg!("C" => [dep("A"), dep("B")]),
+    ]);
+
+    let res = resolve_and_validated(vec![dep("C")], &reg, None).unwrap();
+    assert_same(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("C", "1.0.0"),
+            ("B", "1.0.0"),
+            ("A", "0.1.0"),
+        ]),
+    );
+}
+
+#[test]
+fn public_dependency_filling_in() {
+    // The resolver has an optimization where if a candidate to resolve a dependency
+    // has already been activated then we skip looking at the candidate's dependencies.
+    // However, we have to be careful as the new path may make pub dependencies invalid.
+
+    // Triggering this case requires dependencies to be resolved in a specific order.
+    // Fuzzing found this unintuitive case, which triggers this unfortunate order of operations:
+    // 1. `d`'s dep on `c` is resolved
+    // 2. `d`'s dep on `a` is resolved with `0.1.1`
+    // 3. `c`'s dep on `b` is resolved with `0.0.2`
+    // 4. `b`'s dep on `a` is resolved with `0.0.6`, no pub dep conflict as `b` is private to `c`
+    // 5. `d`'s dep on `b` is resolved with `0.0.2` triggering the optimization.
+    // Do we notice that `d` has a pub dep conflict on `a`? Let's try it and see.
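+    // For the result to validate, `d`'s direct dep on `a` must unify with the
+    // `a 0.0.6` that `b 0.0.2` publicly re-exports, so the resolver has to back
+    // out of its earlier choice of `a 0.1.1`; the assert below checks exactly that.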
+    let reg = registry(vec![
+        pkg!(("a", "0.0.6")),
+        pkg!(("a", "0.1.1")),
+        pkg!(("b", "0.0.0") => [dep("bad")]),
+        pkg!(("b", "0.0.1") => [dep("bad")]),
+        pkg!(("b", "0.0.2") => [dep_req_kind("a", "=0.0.6", Kind::Normal, true)]),
+        pkg!("c" => [dep_req("b", ">=0.0.1")]),
+        pkg!("d" => [dep("c"), dep("a"), dep("b")]),
+    ]);
+
+    let res = resolve_and_validated(vec![dep("d")], &reg, None).unwrap();
+    assert_same(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("d", "1.0.0"),
+            ("c", "1.0.0"),
+            ("b", "0.0.2"),
+            ("a", "0.0.6"),
+        ]),
+    );
+}
+
+#[test]
+fn public_dependency_filling_in_and_update() {
+    // The resolver has an optimization where if a candidate to resolve a dependency
+    // has already been activated then we skip looking at the candidate's dependencies.
+    // However, we have to be careful as the new path may make pub dependencies invalid.
+
+    // Triggering this case requires dependencies to be resolved in a specific order.
+    // Fuzzing found this unintuitive case, which triggers this unfortunate order of operations:
+    // 1. `D`'s dep on `B` is resolved
+    // 2. `D`'s dep on `C` is resolved
+    // 3. `B`'s dep on `A` is resolved with `0.0.0`
+    // 4. `C`'s dep on `B` triggering the optimization.
+    // So did we add `A 0.0.0` to the deps `C` can see?
+    // Or are we going to resolve `C`'s dep on `A` with `0.0.2`?
+    // Let's try it and see.
+    let reg = registry(vec![
+        pkg!(("A", "0.0.0")),
+        pkg!(("A", "0.0.2")),
+        pkg!("B" => [dep_req_kind("A", "=0.0.0", Kind::Normal, true),]),
+        pkg!("C" => [dep("A"),dep("B")]),
+        pkg!("D" => [dep("B"),dep("C")]),
+    ]);
+    let res = resolve_and_validated(vec![dep("D")], &reg, None).unwrap();
+    assert_same(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("D", "1.0.0"),
+            ("C", "1.0.0"),
+            ("B", "1.0.0"),
+            ("A", "0.0.0"),
+        ]),
+    );
+}
+
+#[test]
+fn public_dependency_skipping() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to account for the effects of pub deps.
+    let input = vec![
+        pkg!(("a", "0.2.0")),
+        pkg!(("a", "2.0.0")),
+        pkg!(("b", "0.0.0") => [dep("bad")]),
+        pkg!(("b", "0.2.1") => [dep_req_kind("a", "0.2.0", Kind::Normal, true)]),
+        pkg!("c" => [dep("a"),dep("b")]),
+    ];
+    let reg = registry(input);
+
+    resolve_and_validated(vec![dep("c")], &reg, None).unwrap();
+}
+
+#[test]
+fn public_dependency_skipping_in_backtracking() {
+    // When backtracking due to a failed dependency, if Cargo is
+    // trying to be clever and skip irrelevant dependencies, care must
+    // be taken to account for the effects of pub deps.
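+    // Here `C` needs `A <= 0.0.4` while `B` publicly requires `A >= 0.0.3`;
+    // only `A 0.0.4` satisfies both and also builds (`0.0.0`-`0.0.3` depend on
+    // the nonexistent `bad`), so the skipping must not jump past it.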
+ let input = vec![ + pkg!(("A", "0.0.0") => [dep("bad")]), + pkg!(("A", "0.0.1") => [dep("bad")]), + pkg!(("A", "0.0.2") => [dep("bad")]), + pkg!(("A", "0.0.3") => [dep("bad")]), + pkg!(("A", "0.0.4")), + pkg!(("A", "0.0.5")), + pkg!("B" => [dep_req_kind("A", ">= 0.0.3", Kind::Normal, true)]), + pkg!("C" => [dep_req("A", "<= 0.0.4"), dep("B")]), + ]; + let reg = registry(input); + + resolve_and_validated(vec![dep("C")], ®, None).unwrap(); +} + +#[test] +fn public_sat_topological_order() { + let input = vec![ + pkg!(("a", "0.0.1")), + pkg!(("a", "0.0.0")), + pkg!(("b", "0.0.1") => [dep_req_kind("a", "= 0.0.1", Kind::Normal, true),]), + pkg!(("b", "0.0.0") => [dep("bad"),]), + pkg!("A" => [dep_req("a", "= 0.0.0"),dep_req_kind("b", "*", Kind::Normal, true)]), + ]; + + let reg = registry(input); + assert!(resolve_and_validated(vec![dep("A")], ®, None).is_err()); +} + +#[test] +fn public_sat_unused_makes_things_pub() { + let input = vec![ + pkg!(("a", "0.0.1")), + pkg!(("a", "0.0.0")), + pkg!(("b", "8.0.1") => [dep_req_kind("a", "= 0.0.1", Kind::Normal, true),]), + pkg!(("b", "8.0.0") => [dep_req("a", "= 0.0.1"),]), + pkg!("c" => [dep_req("b", "= 8.0.0"),dep_req("a", "= 0.0.0"),]), + ]; + let reg = registry(input); + + resolve_and_validated(vec![dep("c")], ®, None).unwrap(); +} + +#[test] +fn public_sat_unused_makes_things_pub_2() { + let input = vec![ + pkg!(("c", "0.0.2")), + pkg!(("c", "0.0.1")), + pkg!(("a-sys", "0.0.2")), + pkg!(("a-sys", "0.0.1") => [dep_req_kind("c", "= 0.0.1", Kind::Normal, true),]), + pkg!("P" => [dep_req_kind("a-sys", "*", Kind::Normal, true),dep_req("c", "= 0.0.1"),]), + pkg!("A" => [dep("P"),dep_req("c", "= 0.0.2"),]), + ]; + let reg = registry(input); + + resolve_and_validated(vec![dep("A")], ®, None).unwrap(); +} + +#[test] +#[should_panic(expected = "assertion failed: !name.is_empty()")] +fn test_dependency_with_empty_name() { + // Bug 5229, dependency-names must not be empty + "".to_dep(); +} + +#[test] +fn test_resolving_empty_dependency_list() { + let res = resolve(Vec::new(), ®istry(vec![])).unwrap(); + + assert_eq!(res, names(&["root"])); +} + +#[test] +fn test_resolving_only_package() { + let reg = registry(vec![pkg!("foo")]); + let res = resolve(vec![dep("foo")], ®).unwrap(); + assert_same(&res, &names(&["root", "foo"])); +} + +#[test] +fn test_resolving_one_dep() { + let reg = registry(vec![pkg!("foo"), pkg!("bar")]); + let res = resolve(vec![dep("foo")], ®).unwrap(); + assert_same(&res, &names(&["root", "foo"])); +} + +#[test] +fn test_resolving_multiple_deps() { + let reg = registry(vec![pkg!("foo"), pkg!("bar"), pkg!("baz")]); + let res = resolve(vec![dep("foo"), dep("baz")], ®).unwrap(); + assert_same(&res, &names(&["root", "foo", "baz"])); +} + +#[test] +fn test_resolving_transitive_deps() { + let reg = registry(vec![pkg!("foo"), pkg!("bar" => ["foo"])]); + let res = resolve(vec![dep("bar")], ®).unwrap(); + + assert_same(&res, &names(&["root", "foo", "bar"])); +} + +#[test] +fn test_resolving_common_transitive_deps() { + let reg = registry(vec![pkg!("foo" => ["bar"]), pkg!("bar")]); + let res = resolve(vec![dep("foo"), dep("bar")], ®).unwrap(); + + assert_same(&res, &names(&["root", "foo", "bar"])); +} + +#[test] +fn test_resolving_with_same_name() { + let list = vec![ + pkg_loc("foo", "https://first.example.com"), + pkg_loc("bar", "https://second.example.com"), + ]; + + let reg = registry(list); + let res = resolve( + vec![ + dep_loc("foo", "https://first.example.com"), + dep_loc("bar", "https://second.example.com"), + ], + ®, + ) + .unwrap(); + + 
let mut names = loc_names(&[ + ("foo", "https://first.example.com"), + ("bar", "https://second.example.com"), + ]); + + names.push(pkg_id("root")); + assert_same(&res, &names); +} + +#[test] +fn test_resolving_with_dev_deps() { + let reg = registry(vec![ + pkg!("foo" => ["bar", dep_kind("baz", Kind::Development)]), + pkg!("baz" => ["bat", dep_kind("bam", Kind::Development)]), + pkg!("bar"), + pkg!("bat"), + ]); + + let res = resolve(vec![dep("foo"), dep_kind("baz", Kind::Development)], ®).unwrap(); + + assert_same(&res, &names(&["root", "foo", "bar", "baz", "bat"])); +} + +#[test] +fn resolving_with_many_versions() { + let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]); + + let res = resolve(vec![dep("foo")], ®).unwrap(); + + assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.2")])); +} + +#[test] +fn resolving_with_specific_version() { + let reg = registry(vec![pkg!(("foo", "1.0.1")), pkg!(("foo", "1.0.2"))]); + + let res = resolve(vec![dep_req("foo", "=1.0.1")], ®).unwrap(); + + assert_same(&res, &names(&[("root", "1.0.0"), ("foo", "1.0.1")])); +} + +#[test] +fn test_resolving_maximum_version_with_transitive_deps() { + let reg = registry(vec![ + pkg!(("util", "1.2.2")), + pkg!(("util", "1.0.0")), + pkg!(("util", "1.1.1")), + pkg!("foo" => [dep_req("util", "1.0.0")]), + pkg!("bar" => [dep_req("util", ">=1.0.1")]), + ]); + + let res = resolve(vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], ®).unwrap(); + + assert_contains( + &res, + &names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("util", "1.2.2"), + ]), + ); + assert!(!res.contains(&("util", "1.0.1").to_pkgid())); + assert!(!res.contains(&("util", "1.1.1").to_pkgid())); +} + +#[test] +fn test_resolving_minimum_version_with_transitive_deps() { + enable_nightly_features(); // -Z minimal-versions + // When the minimal-versions config option is specified then the lowest + // possible version of a package should be selected. "util 1.0.0" can't be + // selected because of the requirements of "bar", so the minimum version + // must be 1.1.1. + let reg = registry(vec![ + pkg!(("util", "1.2.2")), + pkg!(("util", "1.0.0")), + pkg!(("util", "1.1.1")), + pkg!("foo" => [dep_req("util", "1.0.0")]), + pkg!("bar" => [dep_req("util", ">=1.0.1")]), + ]); + + let mut config = Config::default().unwrap(); + config + .configure( + 1, + None, + &None, + false, + false, + false, + &None, + &["minimal-versions".to_string()], + ) + .unwrap(); + + let res = resolve_with_config( + vec![dep_req("foo", "1.0.0"), dep_req("bar", "1.0.0")], + ®, + Some(&config), + ) + .unwrap(); + + assert_contains( + &res, + &names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("util", "1.1.1"), + ]), + ); + assert!(!res.contains(&("util", "1.2.2").to_pkgid())); + assert!(!res.contains(&("util", "1.0.0").to_pkgid())); +} + +#[test] +fn resolving_incompat_versions() { + let reg = registry(vec![ + pkg!(("foo", "1.0.1")), + pkg!(("foo", "1.0.2")), + pkg!("bar" => [dep_req("foo", "=1.0.2")]), + ]); + + assert!(resolve(vec![dep_req("foo", "=1.0.1"), dep("bar")], ®).is_err()); +} + +#[test] +fn resolving_wrong_case_from_registry() { + // In the future we may #5678 allow this to happen. + // For back compatibility reasons, we probably won't. + // But we may want to future prove ourselves by understanding it. + // This test documents the current behavior. 
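+    // (`bar` asks for `Foo` but the index only has `foo`; matching is
+    // case-sensitive today, so this resolution fails.)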
+    let reg = registry(vec![pkg!(("foo", "1.0.0")), pkg!("bar" => ["Foo"])]);
+
+    assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_mis_hyphenated_from_registry() {
+    // In the future we may #2775 allow this to happen.
+    // For back compatibility reasons, we probably won't.
+    // But we may want to future-proof ourselves by understanding it.
+    // This test documents the current behavior.
+    let reg = registry(vec![pkg!(("fo-o", "1.0.0")), pkg!("bar" => ["fo_o"])]);
+
+    assert!(resolve(vec![dep("bar")], &reg).is_err());
+}
+
+#[test]
+fn resolving_backtrack() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.2") => [dep("bar")]),
+        pkg!(("foo", "1.0.1") => [dep("baz")]),
+        pkg!("bar" => [dep_req("foo", "=2.0.2")]),
+        pkg!("baz"),
+    ]);
+
+    let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+    assert_contains(
+        &res,
+        &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("baz", "1.0.0")]),
+    );
+}
+
+#[test]
+fn resolving_backtrack_features() {
+    // test for cargo/issues/4347
+    let mut bad = dep("bar");
+    bad.set_features(vec!["bad"]);
+
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.2") => [bad]),
+        pkg!(("foo", "1.0.1") => [dep("bar")]),
+        pkg!("bar"),
+    ]);
+
+    let res = resolve(vec![dep_req("foo", "^1")], &reg).unwrap();
+
+    assert_contains(
+        &res,
+        &names(&[("root", "1.0.0"), ("foo", "1.0.1"), ("bar", "1.0.0")]),
+    );
+}
+
+#[test]
+fn resolving_allows_multiple_compatible_versions() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.0")),
+        pkg!(("foo", "2.0.0")),
+        pkg!(("foo", "0.1.0")),
+        pkg!(("foo", "0.2.0")),
+        pkg!("bar" => ["d1", "d2", "d3", "d4"]),
+        pkg!("d1" => [dep_req("foo", "1")]),
+        pkg!("d2" => [dep_req("foo", "2")]),
+        pkg!("d3" => [dep_req("foo", "0.1")]),
+        pkg!("d4" => [dep_req("foo", "0.2")]),
+    ]);
+
+    let res = resolve(vec![dep("bar")], &reg).unwrap();
+
+    assert_same(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("foo", "1.0.0"),
+            ("foo", "2.0.0"),
+            ("foo", "0.1.0"),
+            ("foo", "0.2.0"),
+            ("d1", "1.0.0"),
+            ("d2", "1.0.0"),
+            ("d3", "1.0.0"),
+            ("d4", "1.0.0"),
+            ("bar", "1.0.0"),
+        ]),
+    );
+}
+
+#[test]
+fn resolving_with_deep_backtracking() {
+    let reg = registry(vec![
+        pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
+        pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
+        pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
+                                  dep_req("other", "1")]),
+        pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
+        pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
+        pkg!(("baz", "1.0.1")),
+        pkg!(("dep_req", "1.0.0")),
+        pkg!(("dep_req", "2.0.0")),
+    ]);
+
+    let res = resolve(vec![dep_req("foo", "1")], &reg).unwrap();
+
+    assert_same(
+        &res,
+        &names(&[
+            ("root", "1.0.0"),
+            ("foo", "1.0.0"),
+            ("bar", "2.0.0"),
+            ("baz", "1.0.1"),
+        ]),
+    );
+}
+
+#[test]
+fn resolving_with_sys_crates() {
+    // This is based on issues/4902
+    // With `l`, a normal library, we get 2 copies so everyone gets the newest compatible version.
+    // But with `l-sys`, a library with a links attribute, we make sure there is only one.
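+    // (In these tests any package whose name ends in `-sys` is given a `links`
+    // attribute; see `pkg_dep` in lib.rs.)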
+ let reg = registry(vec![ + pkg!(("l-sys", "0.9.1")), + pkg!(("l-sys", "0.10.0")), + pkg!(("l", "0.9.1")), + pkg!(("l", "0.10.0")), + pkg!(("d", "1.0.0") => [dep_req("l-sys", ">=0.8.0, <=0.10.0"), dep_req("l", ">=0.8.0, <=0.10.0")]), + pkg!(("r", "1.0.0") => [dep_req("l-sys", "0.9"), dep_req("l", "0.9")]), + ]); + + let res = resolve(vec![dep_req("d", "1"), dep_req("r", "1")], ®).unwrap(); + + assert_same( + &res, + &names(&[ + ("root", "1.0.0"), + ("d", "1.0.0"), + ("r", "1.0.0"), + ("l-sys", "0.9.1"), + ("l", "0.9.1"), + ("l", "0.10.0"), + ]), + ); +} + +#[test] +fn resolving_with_constrained_sibling_backtrack_parent() { + // There is no point in considering all of the backtrack_trap{1,2} + // candidates since they can't change the result of failing to + // resolve 'constrained'. Cargo should (ideally) skip past them and resume + // resolution once the activation of the parent, 'bar', is rolled back. + // Note that the traps are slightly more constrained to make sure they + // get picked first. + let mut reglist = vec![ + pkg!(("foo", "1.0.0") => [dep_req("bar", "1.0"), + dep_req("constrained", "=1.0.0")]), + pkg!(("bar", "1.0.0") => [dep_req("backtrack_trap1", "1.0.2"), + dep_req("backtrack_trap2", "1.0.2"), + dep_req("constrained", "1.0.0")]), + pkg!(("constrained", "1.0.0")), + pkg!(("backtrack_trap1", "1.0.0")), + pkg!(("backtrack_trap2", "1.0.0")), + ]; + // Bump this to make the test harder - it adds more versions of bar that will + // fail to resolve, and more versions of the traps to consider. + const NUM_BARS_AND_TRAPS: usize = 50; // minimum 2 + for i in 1..NUM_BARS_AND_TRAPS { + let vsn = format!("1.0.{}", i); + reglist.push( + pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"), + dep_req("backtrack_trap2", "1.0.2"), + dep_req("constrained", "1.0.1")]), + ); + reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); + reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); + reglist.push(pkg!(("constrained", vsn.clone()))); + } + let reg = registry(reglist); + + let res = resolve(vec![dep_req("foo", "1")], ®).unwrap(); + + assert_contains( + &res, + &names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("constrained", "1.0.0"), + ]), + ); +} + +#[test] +fn resolving_with_many_equivalent_backtracking() { + let mut reglist = Vec::new(); + + const DEPTH: usize = 200; + const BRANCHING_FACTOR: usize = 100; + + // Each level depends on the next but the last level does not exist. + // Without cashing we need to test every path to the last level O(BRANCHING_FACTOR ^ DEPTH) + // and this test will time out. With cashing we need to discover that none of these + // can be activated O(BRANCHING_FACTOR * DEPTH) + for l in 0..DEPTH { + let name = format!("level{}", l); + let next = format!("level{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); + } + } + + let reg = registry(reglist.clone()); + + let res = resolve(vec![dep("level0")], ®); + + assert!(res.is_err()); + + // It is easy to write code that quickly returns an error. + // Lets make sure we can find a good answer if it is there. + reglist.push(pkg!(("level0", "1.0.0"))); + + let reg = registry(reglist.clone()); + + let res = resolve(vec![dep("level0")], ®).unwrap(); + + assert_contains(&res, &names(&[("root", "1.0.0"), ("level0", "1.0.0")])); + + // Make sure we have not special case no candidates. 
+ reglist.push(pkg!(("constrained", "1.1.0"))); + reglist.push(pkg!(("constrained", "1.0.0"))); + reglist.push( + pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep_req("constrained", "=1.0.0")]), + ); + + let reg = registry(reglist.clone()); + + let res = resolve(vec![dep("level0"), dep("constrained")], ®).unwrap(); + + assert_contains( + &res, + &names(&[ + ("root", "1.0.0"), + ("level0", "1.0.0"), + ("constrained", "1.1.0"), + ]), + ); + + let reg = registry(reglist.clone()); + + let res = resolve(vec![dep_req("level0", "1.0.1"), dep("constrained")], ®).unwrap(); + + assert_contains( + &res, + &names(&[ + ("root", "1.0.0"), + (format!("level{}", DEPTH).as_str(), "1.0.0"), + ("constrained", "1.0.0"), + ]), + ); + + let reg = registry(reglist); + + let res = resolve( + vec![dep_req("level0", "1.0.1"), dep_req("constrained", "1.1.0")], + ®, + ); + + assert!(res.is_err()); +} + +#[test] +fn resolving_with_deep_traps() { + let mut reglist = Vec::new(); + + const DEPTH: usize = 200; + const BRANCHING_FACTOR: usize = 100; + + // Each backtrack_trap depends on the next, and adds a backtrack frame. + // None of witch is going to help with `bad`. + for l in 0..DEPTH { + let name = format!("backtrack_trap{}", l); + let next = format!("backtrack_trap{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); + } + } + { + let name = format!("backtrack_trap{}", DEPTH); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()))); + } + } + { + // slightly less constrained to make sure `cloaking` gets picked last. + for i in 1..(BRANCHING_FACTOR + 10) { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("cloaking", vsn.as_str()) => [dep_req("bad", "1.0.1")])); + } + } + + let reg = registry(reglist); + + let res = resolve(vec![dep("backtrack_trap0"), dep("cloaking")], ®); + + assert!(res.is_err()); +} + +#[test] +fn resolving_with_constrained_cousins_backtrack() { + let mut reglist = Vec::new(); + + const DEPTH: usize = 100; + const BRANCHING_FACTOR: usize = 50; + + // Each backtrack_trap depends on the next. + // The last depends on a specific ver of constrained. + for l in 0..DEPTH { + let name = format!("backtrack_trap{}", l); + let next = format!("backtrack_trap{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); + } + } + { + let name = format!("backtrack_trap{}", DEPTH); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push( + pkg!((name.as_str(), vsn.as_str()) => [dep_req("constrained", ">=1.1.0, <=2.0.0")]), + ); + } + } + { + // slightly less constrained to make sure `constrained` gets picked last. + for i in 0..(BRANCHING_FACTOR + 10) { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("constrained", vsn.as_str()))); + } + reglist.push(pkg!(("constrained", "1.1.0"))); + reglist.push(pkg!(("constrained", "2.0.0"))); + reglist.push(pkg!(("constrained", "2.0.1"))); + } + reglist.push(pkg!(("cloaking", "1.0.0") => [dep_req("constrained", "~1.0.0")])); + + let reg = registry(reglist.clone()); + + // `backtrack_trap0 = "*"` is a lot of ways of saying `constrained = ">=1.1.0, <=2.0.0"` + // but `constrained= "2.0.1"` is already picked. + // Only then to try and solve `constrained= "~1.0.0"` which is incompatible. 
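+    // Without conflict caching the search space here is on the order of
+    // BRANCHING_FACTOR ^ DEPTH candidate paths, so this only errors quickly
+    // if the resolver skips the irrelevant traps.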
+ let res = resolve( + vec![ + dep("backtrack_trap0"), + dep_req("constrained", "2.0.1"), + dep("cloaking"), + ], + ®, + ); + + assert!(res.is_err()); + + // Each level depends on the next but the last depends on incompatible deps. + // Let's make sure that we can cache that a dep has incompatible deps. + for l in 0..DEPTH { + let name = format!("level{}", l); + let next = format!("level{}", l + 1); + for i in 1..BRANCHING_FACTOR { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!((name.as_str(), vsn.as_str()) => [dep(next.as_str())])); + } + } + reglist.push( + pkg!((format!("level{}", DEPTH).as_str(), "1.0.0") => [dep("backtrack_trap0"), + dep("cloaking") + ]), + ); + + let reg = registry(reglist); + + let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.1")], ®); + + assert!(res.is_err()); + + let res = resolve(vec![dep("level0"), dep_req("constrained", "2.0.0")], ®).unwrap(); + + assert_contains( + &res, + &names(&[("constrained", "2.0.0"), ("cloaking", "1.0.0")]), + ); +} + +#[test] +fn resolving_with_constrained_sibling_backtrack_activation() { + // It makes sense to resolve most-constrained deps first, but + // with that logic the backtrack traps here come between the two + // attempted resolutions of 'constrained'. When backtracking, + // cargo should skip past them and resume resolution once the + // number of activations for 'constrained' changes. + let mut reglist = vec![ + pkg!(("foo", "1.0.0") => [dep_req("bar", "=1.0.0"), + dep_req("backtrack_trap1", "1.0"), + dep_req("backtrack_trap2", "1.0"), + dep_req("constrained", "<=1.0.60")]), + pkg!(("bar", "1.0.0") => [dep_req("constrained", ">=1.0.60")]), + ]; + // Bump these to make the test harder, but you'll also need to + // change the version constraints on `constrained` above. To correctly + // exercise Cargo, the relationship between the values is: + // NUM_CONSTRAINED - vsn < NUM_TRAPS < vsn + // to make sure the traps are resolved between `constrained`. + const NUM_TRAPS: usize = 45; // min 1 + const NUM_CONSTRAINED: usize = 100; // min 1 + for i in 0..NUM_TRAPS { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); + reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); + } + for i in 0..NUM_CONSTRAINED { + let vsn = format!("1.0.{}", i); + reglist.push(pkg!(("constrained", vsn.clone()))); + } + let reg = registry(reglist); + + let res = resolve(vec![dep_req("foo", "1")], ®).unwrap(); + + assert_contains( + &res, + &names(&[ + ("root", "1.0.0"), + ("foo", "1.0.0"), + ("bar", "1.0.0"), + ("constrained", "1.0.60"), + ]), + ); +} + +#[test] +fn resolving_with_constrained_sibling_transitive_dep_effects() { + // When backtracking due to a failed dependency, if Cargo is + // trying to be clever and skip irrelevant dependencies, care must + // be taken to not miss the transitive effects of alternatives. E.g. + // in the right-to-left resolution of the graph below, B may + // affect whether D is successfully resolved. 
+ //
+ // A
+ // / | \
+ // B C D
+ // | |
+ // C D
+ let reg = registry(vec![
+ pkg!(("A", "1.0.0") => [dep_req("B", "1.0"),
+ dep_req("C", "1.0"),
+ dep_req("D", "1.0.100")]),
+ pkg!(("B", "1.0.0") => [dep_req("C", ">=1.0.0")]),
+ pkg!(("B", "1.0.1") => [dep_req("C", ">=1.0.1")]),
+ pkg!(("C", "1.0.0") => [dep_req("D", "1.0.0")]),
+ pkg!(("C", "1.0.1") => [dep_req("D", ">=1.0.1,<1.0.100")]),
+ pkg!(("C", "1.0.2") => [dep_req("D", ">=1.0.2,<1.0.100")]),
+ pkg!(("D", "1.0.0")),
+ pkg!(("D", "1.0.1")),
+ pkg!(("D", "1.0.2")),
+ pkg!(("D", "1.0.100")),
+ pkg!(("D", "1.0.101")),
+ pkg!(("D", "1.0.102")),
+ pkg!(("D", "1.0.103")),
+ pkg!(("D", "1.0.104")),
+ pkg!(("D", "1.0.105")),
+ ]);
+
+ let res = resolve(vec![dep_req("A", "1")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[
+ ("root", "1.0.0"),
+ ("A", "1.0.0"),
+ ("B", "1.0.0"),
+ ("C", "1.0.0"),
+ ("D", "1.0.105"),
+ ]),
+ );
+}
+
+#[test]
+fn incomplete_information_skipping() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // minimized bug found in:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg!(("a", "1.0.0")),
+ pkg!(("a", "1.1.0")),
+ pkg!("b" => [dep("a")]),
+ pkg!(("c", "1.0.0")),
+ pkg!(("c", "1.1.0")),
+ pkg!("d" => [dep_req("c", "=1.0")]),
+ pkg!(("e", "1.0.0")),
+ pkg!(("e", "1.1.0") => [dep_req("c", "1.1")]),
+ pkg!("to_yank"),
+ pkg!(("f", "1.0.0") => [
+ dep("to_yank"),
+ dep("d"),
+ ]),
+ pkg!(("f", "1.1.0") => [dep("d")]),
+ pkg!("g" => [
+ dep("b"),
+ dep("e"),
+ dep("f"),
+ ]),
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("g")], &reg).unwrap();
+ let package_to_yank = "to_yank".to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("g")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_2() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg!(("b", "3.8.10")),
+ pkg!(("b", "8.7.4")),
+ pkg!(("b", "9.4.6")),
+ pkg!(("c", "1.8.8")),
+ pkg!(("c", "10.2.5")),
+ pkg!(("d", "4.1.2") => [
+ dep_req("bad", "=6.10.9"),
+ ]),
+ pkg!(("d", "5.5.6")),
+ pkg!(("d", "5.6.10")),
+ pkg!(("to_yank", "8.0.1")),
+ pkg!(("to_yank", "8.8.1")),
+ pkg!(("e", "4.7.8") => [
+ dep_req("d", ">=5.5.6, <=5.6.10"),
+ dep_req("to_yank", "=8.0.1"),
+ ]),
+ pkg!(("e", "7.4.9") => [
+ dep_req("bad", "=4.7.5"),
+ ]),
+ pkg!("f" => [
+ dep_req("d", ">=4.1.2, <=5.5.6"),
+ ]),
+ pkg!("g" => [
+ dep("bad"),
+ ]),
+ pkg!(("h", "3.8.3") => [
+ dep("g"),
+ ]),
+ pkg!(("h", "6.8.3") => [
+ dep("f"),
+ ]),
+ pkg!(("h", "8.1.9") => [
+ dep_req("to_yank", "=8.8.1"),
+ ]),
+ pkg!("i" => [
+ dep("b"),
+ dep("c"),
+ dep("e"),
+ dep("h"),
+ ]),
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("i")], &reg).unwrap();
+ let package_to_yank = ("to_yank", "8.8.1").to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("i")], &new_reg).is_ok());
+}
+
+#[test]
+fn incomplete_information_skipping_3() {
+ // When backtracking due to a failed dependency, if Cargo is
+ // trying to be clever and skip irrelevant dependencies, care must
+ // be taken to not miss the transitive effects of alternatives.
+ // Fuzzing discovered that for some reason cargo was skipping based
+ // on incomplete information in the following case:
+ // minimized bug found in:
+ // https://github.com/rust-lang/cargo/commit/003c29b0c71e5ea28fbe8e72c148c755c9f3f8d9
+ let input = vec![
+ pkg! {("to_yank", "3.0.3")},
+ pkg! {("to_yank", "3.3.0")},
+ pkg! {("to_yank", "3.3.1")},
+ pkg! {("a", "3.3.0") => [
+ dep_req("to_yank", "=3.0.3"),
+ ] },
+ pkg! {("a", "3.3.2") => [
+ dep_req("to_yank", "<=3.3.0"),
+ ] },
+ pkg! {("b", "0.1.3") => [
+ dep_req("a", "=3.3.0"),
+ ] },
+ pkg! {("b", "2.0.2") => [
+ dep_req("to_yank", "3.3.0"),
+ dep("a"),
+ ] },
+ pkg! {("b", "2.3.3") => [
+ dep_req("to_yank", "3.3.0"),
+ dep_req("a", "=3.3.0"),
+ ] },
+ ];
+ let reg = registry(input.clone());
+
+ let res = resolve(vec![dep("b")], &reg).unwrap();
+ let package_to_yank = ("to_yank", "3.0.3").to_pkgid();
+ // this package is not used in the resolution.
+ assert!(!res.contains(&package_to_yank));
+ // so when we yank it
+ let new_reg = registry(
+ input
+ .iter()
+ .cloned()
+ .filter(|x| package_to_yank != x.package_id())
+ .collect(),
+ );
+ assert_eq!(input.len(), new_reg.len() + 1);
+ // it should still build
+ assert!(resolve(vec![dep("b")], &new_reg).is_ok());
+}
+
+#[test]
+fn resolving_but_no_exists() {
+ let reg = registry(vec![]);
+
+ let res = resolve(vec![dep_req("foo", "1")], &reg);
+ assert!(res.is_err());
+
+ assert_eq!(
+ res.err().unwrap().to_string(),
+ "no matching package named `foo` found\n\
+ location searched: registry `https://example.com/`\n\
+ required by package `root v1.0.0 (registry `https://example.com/`)`\
+ "
+ );
+}
+
+#[test]
+fn resolving_cycle() {
+ let reg = registry(vec![pkg!("foo" => ["foo"])]);
+
+ let _ = resolve(vec![dep_req("foo", "1")], &reg);
+}
+
+#[test]
+fn hard_equality() {
+ let reg = registry(vec![
+ pkg!(("foo", "1.0.1")),
+ pkg!(("foo", "1.0.0")),
+ pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
+ ]);
+
+ let res = resolve(vec![dep_req("bar", "1"), dep_req("foo", "=1.0.0")], &reg).unwrap();
+
+ assert_same(
+ &res,
+ &names(&[("root", "1.0.0"), ("foo", "1.0.0"), ("bar", "1.0.0")]),
+ );
+}
+
+#[test]
+fn large_conflict_cache() {
+ let mut input = vec![
+ pkg!(("last", "0.0.0") => [dep("bad")]), // just to make sure last is less constrained
+ ];
+ let mut root_deps = vec![dep("last")];
+ const NUM_VERSIONS: u8 = 20;
+ for name in 0..=NUM_VERSIONS {
+ // a large number of conflicts can easily be generated by a sys crate.
+ let sys_name = format!("{}-sys", (b'a' + name) as char);
+ let in_len = input.len();
+ input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&sys_name, "=0.0.0")]));
+ root_deps.push(dep_req(&sys_name, ">= 0.0.1"));
+
+ // a large number of conflicts can also easily be generated by a major release version.
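+ // (here each `last` candidate pins `= 1.0.0` while the root asks for
+ // `>= 1.0.1`, so every pairing conflicts within one major version)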
+ let plane_name = format!("{}", (b'a' + name) as char);
+ let in_len = input.len();
+ input.push(pkg!(("last", format!("{}.0.0", in_len)) => [dep_req(&plane_name, "=1.0.0")]));
+ root_deps.push(dep_req(&plane_name, ">= 1.0.1"));
+
+ for i in 0..=NUM_VERSIONS {
+ input.push(pkg!((&sys_name, format!("{}.0.0", i))));
+ input.push(pkg!((&plane_name, format!("1.0.{}", i))));
+ }
+ }
+ let reg = registry(input);
+ let _ = resolve(root_deps, &reg);
+}
+
+#[test]
+fn off_by_one_bug() {
+ let input = vec![
+ pkg!(("A-sys", "0.0.1")),
+ pkg!(("A-sys", "0.0.4")),
+ pkg!(("A-sys", "0.0.6")),
+ pkg!(("A-sys", "0.0.7")),
+ pkg!(("NA", "0.0.0") => [dep_req("A-sys", "<= 0.0.5"),]),
+ pkg!(("NA", "0.0.1") => [dep_req("A-sys", ">= 0.0.6, <= 0.0.8"),]),
+ pkg!(("a", "0.0.1")),
+ pkg!(("a", "0.0.2")),
+ pkg!(("aa", "0.0.0") => [dep_req("A-sys", ">= 0.0.4, <= 0.0.6"),dep_req("NA", "<= 0.0.0"),]),
+ pkg!(("f", "0.0.3") => [dep("NA"),dep_req("a", "<= 0.0.2"),dep("aa"),]),
+ ];
+
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("f")], &reg, None);
+}
+
+#[test]
+fn conflict_store_bug() {
+ let input = vec![
+ pkg!(("A", "0.0.3")),
+ pkg!(("A", "0.0.5")),
+ pkg!(("A", "0.0.9") => [dep("bad"),]),
+ pkg!(("A", "0.0.10") => [dep("bad"),]),
+ pkg!(("L-sys", "0.0.1") => [dep("bad"),]),
+ pkg!(("L-sys", "0.0.5")),
+ pkg!(("R", "0.0.4") => [
+ dep_req("L-sys", "= 0.0.5"),
+ ]),
+ pkg!(("R", "0.0.6")),
+ pkg!(("a-sys", "0.0.5")),
+ pkg!(("a-sys", "0.0.11")),
+ pkg!(("c", "0.0.12") => [
+ dep_req("R", ">= 0.0.3, <= 0.0.4"),
+ ]),
+ pkg!(("c", "0.0.13") => [
+ dep_req("a-sys", ">= 0.0.8, <= 0.0.11"),
+ ]),
+ pkg!(("c0", "0.0.6") => [
+ dep_req("L-sys", "<= 0.0.2"),
+ ]),
+ pkg!(("c0", "0.0.10") => [
+ dep_req("A", ">= 0.0.9, <= 0.0.10"),
+ dep_req("a-sys", "= 0.0.5"),
+ ]),
+ pkg!("j" => [
+ dep_req("A", ">= 0.0.3, <= 0.0.5"),
+ dep_req("R", ">=0.0.4, <= 0.0.6"),
+ dep_req("c", ">= 0.0.9"),
+ dep_req("c0", ">= 0.0.6"),
+ ]),
+ ];
+
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("j")], &reg, None);
+}
+
+#[test]
+fn conflict_store_more_then_one_match() {
+ let input = vec![
+ pkg!(("A", "0.0.0")),
+ pkg!(("A", "0.0.1")),
+ pkg!(("A-sys", "0.0.0")),
+ pkg!(("A-sys", "0.0.1")),
+ pkg!(("A-sys", "0.0.2")),
+ pkg!(("A-sys", "0.0.3")),
+ pkg!(("A-sys", "0.0.12")),
+ pkg!(("A-sys", "0.0.16")),
+ pkg!(("B-sys", "0.0.0")),
+ pkg!(("B-sys", "0.0.1")),
+ pkg!(("B-sys", "0.0.2") => [dep_req("A-sys", "= 0.0.12"),]),
+ pkg!(("BA-sys", "0.0.0") => [dep_req("A-sys","= 0.0.16"),]),
+ pkg!(("BA-sys", "0.0.1") => [dep("bad"),]),
+ pkg!(("BA-sys", "0.0.2") => [dep("bad"),]),
+ pkg!("nA" => [
+ dep("A"),
+ dep_req("A-sys", "<= 0.0.3"),
+ dep("B-sys"),
+ dep("BA-sys"),
+ ]),
+ ];
+ let reg = registry(input);
+ let _ = resolve_and_validated(vec![dep("nA")], &reg, None);
+}
diff --git a/src/bin/bench.rs b/src/bin/bench.rs
deleted file mode 100644
index e4b5cac0c2e..00000000000
--- a/src/bin/bench.rs
+++ /dev/null
@@ -1,103 +0,0 @@
-use cargo::ops;
-use cargo::util::{CliResult, CliError, Human, Config};
-use cargo::util::important_paths::{find_root_manifest_for_cwd};
-
-#[derive(RustcDecodable)]
-struct Options {
- flag_no_run: bool,
- flag_package: Option<String>,
- flag_jobs: Option<u32>,
- flag_features: Vec<String>,
- flag_no_default_features: bool,
- flag_target: Option<String>,
- flag_manifest_path: Option<String>,
- flag_verbose: bool,
- flag_quiet: bool,
- flag_color: Option<String>,
- flag_lib: bool,
- flag_bin: Vec<String>,
- flag_example: Vec<String>,
- flag_test: Vec<String>,
- flag_bench: Vec<String>,
- arg_args: Vec<String>,
-}
-
-pub const USAGE: &'static str = "
-Execute all benchmarks of a local package
-
-Usage:
- cargo bench [options] [--] [<args>...]
-
-Options:
- -h, --help Print this message
- --lib Benchmark only this package's library
- --bin NAME Benchmark only the specified binary
- --example NAME Benchmark only the specified example
- --test NAME Benchmark only the specified test target
- --bench NAME Benchmark only the specified bench target
- --no-run Compile, but don't run benchmarks
- -p SPEC, --package SPEC Package to run benchmarks for
- -j N, --jobs N The number of jobs to run in parallel
- --features FEATURES Space-separated list of features to also build
- --no-default-features Do not build the `default` feature
- --target TRIPLE Build for the target triple
- --manifest-path PATH Path to the manifest to build benchmarks for
- -v, --verbose Use verbose output
- -q, --quiet No output printed to stdout
- --color WHEN Coloring: auto, always, never
-
-All of the trailing arguments are passed to the benchmark binaries generated
-for filtering benchmarks and generally providing options configuring how they
-run.
-
-If the --package argument is given, then SPEC is a package id specification
-which indicates which package should be benchmarked. If it is not given, then
-the current package is benchmarked. For more information on SPEC and its format,
-see the `cargo help pkgid` command.
-
-The --jobs argument affects the building of the benchmark executable but does
-not affect how many jobs are used when running the benchmarks.
-
-Compilation can be customized with the `bench` profile in the manifest.
-";
-
-pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
- let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
- try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet));
- try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
-
- let ops = ops::TestOptions {
- no_run: options.flag_no_run,
- compile_opts: ops::CompileOptions {
- config: config,
- jobs: options.flag_jobs,
- target: options.flag_target.as_ref().map(|s| &s[..]),
- features: &options.flag_features,
- no_default_features: options.flag_no_default_features,
- spec: options.flag_package.as_ref().map(|s| &s[..]),
- exec_engine: None,
- release: true,
- mode: ops::CompileMode::Bench,
- filter: ops::CompileFilter::new(options.flag_lib,
- &options.flag_bin,
- &options.flag_test,
- &options.flag_example,
- &options.flag_bench),
- target_rustc_args: None,
- },
- };
-
- let err = try!(ops::run_benches(&root, &ops,
- &options.arg_args).map_err(|err| {
- CliError::from_boxed(err, 101)
- }));
- match err {
- None => Ok(None),
- Some(err) => {
- Err(match err.exit.as_ref().and_then(|c| c.code()) {
- Some(i) => CliError::new("", i),
- None => CliError::from_error(Human(err), 101)
- })
- }
- }
-}
diff --git a/src/bin/build.rs b/src/bin/build.rs
deleted file mode 100644
index 984a0e752a3..00000000000
--- a/src/bin/build.rs
+++ /dev/null
@@ -1,90 +0,0 @@
-use std::env;
-
-use cargo::ops::CompileOptions;
-use cargo::ops;
-use cargo::util::important_paths::{find_root_manifest_for_cwd};
-use cargo::util::{CliResult, CliError, Config};
-
-#[derive(RustcDecodable)]
-struct Options {
- flag_package: Option<String>,
- flag_jobs: Option<u32>,
- flag_features: Vec<String>,
- flag_no_default_features: bool,
- flag_target: Option<String>,
- flag_manifest_path: Option<String>,
- flag_verbose: bool,
- flag_quiet: bool,
- flag_color: Option<String>,
- flag_release: bool,
- flag_lib: bool,
- flag_bin: Vec<String>,
- flag_example: Vec<String>,
- flag_test: Vec<String>,
- flag_bench: Vec<String>,
-}
-
-pub const USAGE: &'static str = "
&'static str = " -Compile a local package and all of its dependencies - -Usage: - cargo build [options] - -Options: - -h, --help Print this message - -p SPEC, --package SPEC Package to build - -j N, --jobs N The number of jobs to run in parallel - --lib Build only this package's library - --bin NAME Build only the specified binary - --example NAME Build only the specified example - --test NAME Build only the specified test target - --bench NAME Build only the specified benchmark target - --release Build artifacts in release mode, with optimizations - --features FEATURES Space-separated list of features to also build - --no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to compile - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -If the --package argument is given, then SPEC is a package id specification -which indicates which package should be built. If it is not given, then the -current package is built. For more information on SPEC and its format, see the -`cargo help pkgid` command. - -Compilation can be configured via the use of profiles which are configured in -the manifest. The default profile for this command is `dev`, but passing -the --release flag will use the `release` profile instead. -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - debug!("executing; cmd=cargo-build; args={:?}", - env::args().collect::>()); - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - - let opts = CompileOptions { - config: config, - jobs: options.flag_jobs, - target: options.flag_target.as_ref().map(|t| &t[..]), - features: &options.flag_features, - no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), - exec_engine: None, - mode: ops::CompileMode::Build, - release: options.flag_release, - filter: ops::CompileFilter::new(options.flag_lib, - &options.flag_bin, - &options.flag_test, - &options.flag_example, - &options.flag_bench), - target_rustc_args: None, - }; - - ops::compile(&root, &opts).map(|_| None).map_err(|err| { - CliError::from_boxed(err, 101) - }) -} diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs deleted file mode 100644 index 3442357209f..00000000000 --- a/src/bin/cargo.rs +++ /dev/null @@ -1,317 +0,0 @@ -extern crate cargo; -extern crate env_logger; -extern crate git2_curl; -extern crate rustc_serialize; -extern crate toml; -#[macro_use] extern crate log; - -use std::collections::BTreeSet; -use std::env; -use std::fs; -use std::io; -use std::path::{PathBuf, Path}; -use std::process::Command; -use std::thread::Builder; - -use cargo::{execute_main_without_stdin, handle_error, shell}; -use cargo::core::MultiShell; -use cargo::util::{CliError, CliResult, lev_distance, Config}; - -#[derive(RustcDecodable)] -struct Flags { - flag_list: bool, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - arg_command: String, - arg_args: Vec, -} - -const USAGE: &'static str = " -Rust's package manager - -Usage: - cargo [...] 
- cargo [options]
-
-Options:
- -h, --help Display this message
- -V, --version Print version info and exit
- --list List installed commands
- -v, --verbose Use verbose output
- -q, --quiet No output printed to stdout
- --color WHEN Coloring: auto, always, never
-
-Some common cargo commands are:
- build Compile the current project
- clean Remove the target directory
- doc Build this project's and its dependencies' documentation
- new Create a new cargo project
- run Build and execute src/main.rs
- test Run the tests
- bench Run the benchmarks
- update Update dependencies listed in Cargo.lock
- search Search registry for crates
-
-See 'cargo help <command>' for more information on a specific command.
-";
-
-fn main() {
- env_logger::init().unwrap();
-
- // Right now the algorithm in cargo::core::resolve is pretty recursive and
- // runs the risk of blowing the stack. Platforms tend to have different
- // stack limits by default (I just witnessed 512K on OSX and 2MB on Linux)
- // so to get a consistent experience just spawn ourselves with a large stack
- // size.
- let stack_size = env::var("CARGO_STACK_SIZE").ok()
- .and_then(|s| s.parse().ok())
- .unwrap_or(8 * 1024 * 1024); // 8MB
- Builder::new().stack_size(stack_size).spawn(|| {
- execute_main_without_stdin(execute, true, USAGE)
- }).unwrap().join().unwrap();
-}
-
-macro_rules! each_subcommand{ ($mac:ident) => ({
- $mac!(bench);
- $mac!(build);
- $mac!(clean);
- $mac!(doc);
- $mac!(fetch);
- $mac!(generate_lockfile);
- $mac!(git_checkout);
- $mac!(help);
- $mac!(locate_project);
- $mac!(login);
- $mac!(new);
- $mac!(owner);
- $mac!(package);
- $mac!(pkgid);
- $mac!(publish);
- $mac!(read_manifest);
- $mac!(run);
- $mac!(rustc);
- $mac!(search);
- $mac!(test);
- $mac!(update);
- $mac!(verify_project);
- $mac!(version);
- $mac!(yank);
-}) }
-
-/**
-  The top-level `cargo` command handles configuration and project location
-  because they are fundamental (and intertwined). Other commands can rely
-  on this top-level information.
-*/
-fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> {
- try!(config.shell().set_verbosity(flags.flag_verbose, flags.flag_quiet));
- try!(config.shell().set_color_config(flags.flag_color.as_ref().map(|s| &s[..])));
-
- init_git_transports(config);
-
- if flags.flag_list {
- println!("Installed Commands:");
- for command in list_commands().into_iter() {
- println!(" {}", command);
- };
- return Ok(None)
- }
-
- let args = match &flags.arg_command[..] {
- // For the commands `cargo` and `cargo help`, re-execute ourselves as
- // `cargo -h` so we can go through the normal process of printing the
- // help message.
- "" | "help" if flags.arg_args.is_empty() => {
- config.shell().set_verbose(true);
- let args = &["cargo".to_string(), "-h".to_string()];
- let r = cargo::call_main_without_stdin(execute, config, USAGE, args,
- false);
- cargo::process_executed(r, &mut config.shell());
- return Ok(None)
- }
-
- // For `cargo help -h` and `cargo help --help`, print out the help
- // message for `cargo help`
- "help" if flags.arg_args[0] == "-h" ||
- flags.arg_args[0] == "--help" => {
- vec!["cargo".to_string(), "help".to_string(), "-h".to_string()]
- }
-
- // For `cargo help foo`, print out the usage message for the specified
- // subcommand by executing the command with the `-h` flag.
- "help" => {
- vec!["cargo".to_string(), flags.arg_args[0].clone(),
- "-h".to_string()]
- }
-
- // For all other invocations, we're of the form `cargo foo args...`. We
- // use the exact environment arguments to preserve tokens like `--` for
- // example.
- _ => env::args().collect(),
- };
-
- macro_rules! cmd{ ($name:ident) => (
- if args[1] == stringify!($name).replace("_", "-") {
- mod $name;
- config.shell().set_verbose(true);
- let r = cargo::call_main_without_stdin($name::execute, config,
- $name::USAGE,
- &args,
- false);
- cargo::process_executed(r, &mut config.shell());
- return Ok(None)
- }
- ) }
- each_subcommand!(cmd);
-
- execute_subcommand(&args[1], &args, &mut config.shell());
- Ok(None)
-}
-
-fn find_closest(cmd: &str) -> Option<String> {
- let cmds = list_commands();
- // Only consider candidates with a lev_distance of 3 or less so we don't
- // suggest out-of-the-blue options.
- let mut filtered = cmds.iter().map(|c| (lev_distance(&c, cmd), c))
- .filter(|&(d, _)| d < 4)
- .collect::<Vec<_>>();
- filtered.sort_by(|a, b| a.0.cmp(&b.0));
-
- if filtered.len() == 0 {
- None
- } else {
- Some(filtered[0].1.to_string())
- }
-}
-
-fn execute_subcommand(cmd: &str, args: &[String], shell: &mut MultiShell) {
- let command = match find_command(cmd) {
- Some(command) => command,
- None => {
- let msg = match find_closest(cmd) {
- Some(closest) => format!("No such subcommand\n\n\t\
- Did you mean `{}`?\n", closest),
- None => "No such subcommand".to_string()
- };
- return handle_error(CliError::new(&msg, 127), shell)
- }
- };
- match Command::new(&command).args(&args[1..]).status() {
- Ok(ref status) if status.success() => {}
- Ok(ref status) => {
- match status.code() {
- Some(code) => handle_error(CliError::new("", code), shell),
- None => {
- let msg = format!("subcommand failed with: {}", status);
- handle_error(CliError::new(&msg, 101), shell)
- }
- }
- }
- Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
- handle_error(CliError::new("No such subcommand", 127), shell)
- }
- Err(err) => {
- let msg = format!("Subcommand failed to run: {}", err);
- handle_error(CliError::new(&msg, 127), shell)
- }
- }
-}
-
-/// List all runnable commands. find_command should always succeed
-/// if given one of returned command.
-fn list_commands() -> BTreeSet<String> {
- let command_prefix = "cargo-";
- let mut commands = BTreeSet::new();
- for dir in list_command_directory().iter() {
- let entries = match fs::read_dir(dir) {
- Ok(entries) => entries,
- _ => continue
- };
- for entry in entries {
- let entry = match entry { Ok(e) => e, Err(..) => continue };
- let entry = entry.path();
- let filename = match entry.file_name().and_then(|s| s.to_str()) {
- Some(filename) => filename,
- _ => continue
- };
- if filename.starts_with(command_prefix) &&
- filename.ends_with(env::consts::EXE_SUFFIX) &&
- is_executable(&entry) {
- let command = &filename[
- command_prefix.len()..
- filename.len() - env::consts::EXE_SUFFIX.len()];
- commands.insert(command.to_string());
- }
- }
- }
-
- macro_rules! add_cmd{ ($cmd:ident) => ({
- commands.insert(stringify!($cmd).replace("_", "-"));
- }) }
- each_subcommand!(add_cmd);
- commands
-}
-
-#[cfg(unix)]
-fn is_executable(path: &Path) -> bool {
- use std::os::unix::prelude::*;
- fs::metadata(path).map(|m| {
- m.permissions().mode() & 0o001 == 0o001
- }).unwrap_or(false)
-}
-#[cfg(windows)]
-fn is_executable(path: &Path) -> bool {
- fs::metadata(path).map(|m| m.is_file()).unwrap_or(false)
-}
-
-/// Get `Command` to run given command.
-fn find_command(cmd: &str) -> Option<PathBuf> {
- let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
- let dirs = list_command_directory();
- let mut command_paths = dirs.iter().map(|dir| dir.join(&command_exe));
- command_paths.find(|path| fs::metadata(&path).is_ok())
-}
-
-/// List candidate locations where subcommands might be installed.
-fn list_command_directory() -> Vec<PathBuf> {
- let mut dirs = vec![];
- if let Ok(mut path) = env::current_exe() {
- path.pop();
- dirs.push(path.join("../lib/cargo"));
- dirs.push(path);
- }
- if let Some(val) = env::var_os("PATH") {
- dirs.extend(env::split_paths(&val));
- }
- dirs
-}
-
-fn init_git_transports(config: &Config) {
- // Only use a custom transport if a proxy is configured, right now libgit2
- // doesn't support proxies and we have to use a custom transport in this
- // case. The custom transport, however, is not as well battle-tested.
- match cargo::ops::http_proxy_exists(config) {
- Ok(true) => {}
- _ => return
- }
-
- let handle = match cargo::ops::http_handle(config) {
- Ok(handle) => handle,
- Err(..) => return,
- };
-
- // The unsafety of the registration function derives from two aspects:
- //
- // 1. This call must be synchronized with all other registration calls as
- //    well as construction of new transports.
- // 2. The argument is leaked.
- //
- // We're clear on point (1) because this is only called at the start of this
- // binary (we know what the state of the world looks like) and we're mostly
- // clear on point (2) because we'd only free it after everything is done
- // anyway
- unsafe {
- git2_curl::register(handle);
- }
-}
diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs
new file mode 100644
index 00000000000..f5d507701aa
--- /dev/null
+++ b/src/bin/cargo/cli.rs
@@ -0,0 +1,254 @@
+use clap;
+
+use clap::{AppSettings, Arg, ArgMatches};
+
+use cargo::core::features;
+use cargo::{self, CliResult, Config};
+
+use super::commands;
+use super::list_commands;
+use crate::command_prelude::*;
+
+pub fn main(config: &mut Config) -> CliResult {
+ let args = match cli().get_matches_safe() {
+ Ok(args) => args,
+ Err(e) => {
+ if e.kind == clap::ErrorKind::UnrecognizedSubcommand {
+ // An unrecognized subcommand might be an external subcommand.
+ let cmd = &e.info.as_ref().unwrap()[0].to_owned();
+ return super::execute_external_subcommand(config, cmd, &[cmd, "--help"])
+ .map_err(|_| e.into());
+ } else {
+ return Err(e.into());
+ }
+ }
+ };
+
+ if args.value_of("unstable-features") == Some("help") {
+ println!(
+ "
+Available unstable (nightly-only) flags:
+
+ -Z avoid-dev-deps -- Avoid installing dev-dependencies if possible
+ -Z minimal-versions -- Install minimal dependency versions instead of maximum
+ -Z no-index-update -- Do not update the registry, avoids a network request for benchmarking
+ -Z unstable-options -- Allow the usage of unstable options such as --registry
+ -Z config-profile -- Read profiles from .cargo/config files
+ -Z install-upgrade -- `cargo install` will upgrade instead of failing
+ -Z cache-messages -- Cache compiler messages
+
+Run with 'cargo -Z [FLAG] [SUBCOMMAND]'"
+ );
+ if !features::nightly_features_allowed() {
+ println!(
+ "\nUnstable flags are only available on the nightly channel \
+ of Cargo, but this is the `{}` channel.\n\
+ {}",
+ features::channel(),
+ features::SEE_CHANNELS
+ );
+ }
+ println!(
+ "\nSee https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \
+ for more information about these flags."
+ );
+ return Ok(());
+ }
+
+ let is_verbose = args.occurrences_of("verbose") > 0;
+ if args.is_present("version") {
+ let version = get_version_string(is_verbose);
+ print!("{}", version);
+ return Ok(());
+ }
+
+ if let Some(code) = args.value_of("explain") {
+ let mut process = config.load_global_rustc(None)?.process();
+ process.arg("--explain").arg(code).exec()?;
+ return Ok(());
+ }
+
+ if args.is_present("list") {
+ println!("Installed Commands:");
+ for command in list_commands(config) {
+ match command {
+ CommandInfo::BuiltIn { name, about } => {
+ let summary = about.unwrap_or_default();
+ let summary = summary.lines().next().unwrap_or(&summary); // display only the first line
+ println!(" {:<20} {}", name, summary)
+ }
+ CommandInfo::External { name, path } => {
+ if is_verbose {
+ println!(" {:<20} {}", name, path.display())
+ } else {
+ println!(" {}", name)
+ }
+ }
+ }
+ }
+ return Ok(());
+ }
+
+ let args = expand_aliases(config, args)?;
+
+ execute_subcommand(config, &args)
+}
+
+pub fn get_version_string(is_verbose: bool) -> String {
+ let version = cargo::version();
+ let mut version_string = version.to_string();
+ version_string.push_str("\n");
+ if is_verbose {
+ version_string.push_str(&format!(
+ "release: {}.{}.{}\n",
+ version.major, version.minor, version.patch
+ ));
+ if let Some(ref cfg) = version.cfg_info {
+ if let Some(ref ci) = cfg.commit_info {
+ version_string.push_str(&format!("commit-hash: {}\n", ci.commit_hash));
+ version_string.push_str(&format!("commit-date: {}\n", ci.commit_date));
+ }
+ }
+ }
+ version_string
+}
+
+fn expand_aliases(
+ config: &mut Config,
+ args: ArgMatches<'static>,
+) -> Result<ArgMatches<'static>, CliError> {
+ if let (cmd, Some(args)) = args.subcommand() {
+ match (
+ commands::builtin_exec(cmd),
+ super::aliased_command(config, cmd)?,
+ ) {
+ (Some(_), Some(_)) => {
+ // User alias conflicts with a built-in subcommand
+ config.shell().warn(format!(
+ "user-defined alias `{}` is ignored, because it is shadowed by a built-in command",
+ cmd,
+ ))?;
+ }
+ (_, Some(mut alias)) => {
+ alias.extend(
+ args.values_of("")
+ .unwrap_or_default()
+ .map(|s| s.to_string()),
+ );
+ let args = cli()
+ .setting(AppSettings::NoBinaryName)
+ .get_matches_from_safe(alias)?;
+ return expand_aliases(config, args);
+ }
+ (_, None) => {}
+ }
+ };
+
+ Ok(args)
+}
+
+fn execute_subcommand(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+ let (cmd, subcommand_args) = match args.subcommand() {
+ (cmd, Some(args)) => (cmd, args),
+ _ => {
+ cli().print_help()?;
+ return Ok(());
+ }
+ };
+
+ let arg_target_dir = &subcommand_args.value_of_path("target-dir", config);
+
+ config.configure(
+ args.occurrences_of("verbose") as u32,
+ if args.is_present("quiet") || subcommand_args.is_present("quiet") {
+ Some(true)
+ } else {
+ None
+ },
+ &args.value_of("color").map(|s| s.to_string()),
+ args.is_present("frozen"),
+ args.is_present("locked"),
+ args.is_present("offline"),
+ arg_target_dir,
+ &args
+ .values_of_lossy("unstable-features")
+ .unwrap_or_default(),
+ )?;
+
+ if let Some(exec) = commands::builtin_exec(cmd) {
+ return exec(config, subcommand_args);
+ }
+
+ let mut ext_args: Vec<&str> = vec![cmd];
+ ext_args.extend(subcommand_args.values_of("").unwrap_or_default());
+ super::execute_external_subcommand(config, cmd, &ext_args)
+}
+
+fn cli() -> App {
+ App::new("cargo")
+ .settings(&[
+ AppSettings::UnifiedHelpMessage,
+ AppSettings::DeriveDisplayOrder,
+ AppSettings::VersionlessSubcommands,
+ AppSettings::AllowExternalSubcommands,
+ ])
+ .template(
+ "\
+Rust's package manager
+
+USAGE:
+ {usage}
+
+OPTIONS:
+{unified}
+
+Some common cargo commands are (see all commands with --list):
+ build Compile the current package
+ check Analyze the current package and report errors, but don't build object files
+ clean Remove the target directory
+ doc Build this package's and its dependencies' documentation
+ new Create a new cargo package
+ init Create a new cargo package in an existing directory
+ run Run a binary or example of the local package
+ test Run the tests
+ bench Run the benchmarks
+ update Update dependencies listed in Cargo.lock
+ search Search registry for crates
+ publish Package and upload this package to the registry
+ install Install a Rust binary. Default location is $HOME/.cargo/bin
+ uninstall Uninstall a Rust binary
+
+See 'cargo help <SUBCOMMAND>' for more information on a specific command.\n",
+ )
+ .arg(opt("version", "Print version info and exit").short("V"))
+ .arg(opt("list", "List installed commands"))
+ .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE"))
+ .arg(
+ opt(
+ "verbose",
+ "Use verbose output (-vv very verbose/build.rs output)",
+ )
+ .short("v")
+ .multiple(true)
+ .global(true),
+ )
+ .arg(opt("quiet", "No output printed to stdout").short("q"))
+ .arg(
+ opt("color", "Coloring: auto, always, never")
+ .value_name("WHEN")
+ .global(true),
+ )
+ .arg(opt("frozen", "Require Cargo.lock and cache are up to date").global(true))
+ .arg(opt("locked", "Require Cargo.lock is up to date").global(true))
+ .arg(opt("offline", "Run without accessing the network").global(true))
+ .arg(
+ Arg::with_name("unstable-features")
+ .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details")
+ .short("Z")
+ .value_name("FLAG")
+ .multiple(true)
+ .number_of_values(1)
+ .global(true),
+ )
+ .subcommands(commands::builtin())
+}
diff --git a/src/bin/cargo/commands/bench.rs b/src/bin/cargo/commands/bench.rs
new file mode 100644
index 00000000000..e4d9959f075
--- /dev/null
+++ b/src/bin/cargo/commands/bench.rs
@@ -0,0 +1,98 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, TestOptions};
+
+pub fn cli() -> App {
+ subcommand("bench")
+ .setting(AppSettings::TrailingVarArg)
+ .about("Execute all benchmarks of a local package")
+ .arg(opt("quiet", "No output printed to stdout").short("q"))
+ .arg(
+ Arg::with_name("BENCHNAME")
+ .help("If specified, only run benches containing this string in their names"),
+ )
+ .arg(
+ Arg::with_name("args")
+ .help("Arguments for the bench binary")
+ .multiple(true)
+ .last(true),
+ )
+ .arg_targets_all(
+ "Benchmark only this package's library",
+ "Benchmark only the specified binary",
+ "Benchmark all binaries",
+ "Benchmark only the specified example",
+ "Benchmark all examples",
+ "Benchmark only the specified test target",
+ "Benchmark all tests",
+ "Benchmark only the specified bench target",
+ "Benchmark all benches",
+ "Benchmark all targets",
+ )
+ .arg(opt("no-run", "Compile, but don't run benchmarks"))
+ .arg_package_spec(
+ "Package to run benchmarks for",
+ "Benchmark all packages in the workspace",
+ "Exclude packages from the benchmark",
+ )
+ .arg_jobs()
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_message_format()
+ .arg(opt(
+ "no-fail-fast",
+ "Run all benchmarks regardless of failure",
+ ))
+ .after_help(
+ "\
+The benchmark filtering argument BENCHNAME and all the arguments following the
+two dashes (`--`) are passed to the benchmark binaries and thus to libtest
+(rustc's built-in unit-test and micro-benchmarking framework). If you're
+passing arguments to both Cargo and the binary, the ones after `--` go to the
+binary, the ones before go to Cargo. For details about libtest's arguments see
+the output of `cargo bench -- --help`.
+
+If the `--package` argument is given, then SPEC is a package ID specification
+which indicates which package should be benchmarked. If it is not given, then
+the current package is benchmarked. For more information on SPEC and its format,
+see the `cargo help pkgid` command.
+
+All packages in the workspace are benchmarked if the `--all` flag is supplied. The
+`--all` flag is automatically assumed for a virtual manifest.
+Note that `--exclude` has to be specified in conjunction with the `--all` flag.
+
+The `--jobs` argument affects the building of the benchmark executable but does
+not affect how many jobs are used when running the benchmarks.
+
+Compilation can be customized with the `bench` profile in the manifest.
+",
+ )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+ let ws = args.workspace(config)?;
+ let mut compile_opts = args.compile_options(config, CompileMode::Bench, Some(&ws))?;
+
+ compile_opts.build_config.release = true;
+
+ let ops = TestOptions {
+ no_run: args.is_present("no-run"),
+ no_fail_fast: args.is_present("no-fail-fast"),
+ compile_opts,
+ };
+
+ let bench_args = args.value_of("BENCHNAME").into_iter();
+ let bench_args = bench_args.chain(args.values_of("args").unwrap_or_default());
+ let bench_args = bench_args.collect::<Vec<_>>();
+
+ let err = ops::run_benches(&ws, &ops, &bench_args)?;
+ match err {
+ None => Ok(()),
+ Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
+ Some(i) => CliError::new(failure::format_err!("bench failed"), i),
+ None => CliError::new(err.into(), 101),
+ }),
+ }
+}
diff --git a/src/bin/cargo/commands/build.rs b/src/bin/cargo/commands/build.rs
new file mode 100644
index 00000000000..ba83b7c1f02
--- /dev/null
+++ b/src/bin/cargo/commands/build.rs
@@ -0,0 +1,68 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+ subcommand("build")
+ // subcommand aliases are handled in aliased_command()
+ // .alias("b")
+ .about("Compile a local package and all of its dependencies")
+ .arg(opt("quiet", "No output printed to stdout").short("q"))
+ .arg_package_spec(
+ "Package to build (see `cargo help pkgid`)",
+ "Build all packages in the workspace",
+ "Exclude packages from the build",
+ )
+ .arg_jobs()
+ .arg_targets_all(
+ "Build only this package's library",
+ "Build only the specified binary",
+ "Build all binaries",
+ "Build only the specified example",
+ "Build all examples",
+ "Build only the specified test target",
+ "Build all tests",
+ "Build only the specified bench target",
+ "Build all benches",
+ "Build all targets",
+ )
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg(
+ opt(
+ "out-dir",
+ "Copy final artifacts to this directory (unstable)",
+ )
+ .value_name("PATH"),
+ )
+ .arg_manifest_path()
+ .arg_message_format()
+ .arg_build_plan()
+ .after_help(
+ "\
+All packages in the workspace are built if the `--all` flag is supplied. The
+`--all` flag is automatically assumed for a virtual manifest.
+Note that `--exclude` has to be specified in conjunction with the `--all` flag.
+
+Compilation can be configured via the use of profiles which are configured in
+the manifest. 
The default profile for this command is `dev`, but passing +the --release flag will use the `release` profile instead. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let mut compile_opts = args.compile_options(config, CompileMode::Build, Some(&ws))?; + + compile_opts.export_dir = args.value_of_path("out-dir", config); + if compile_opts.export_dir.is_some() { + config + .cli_unstable() + .fail_if_stable_opt("--out-dir", 6790)?; + } + ops::compile(&ws, &compile_opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/check.rs b/src/bin/cargo/commands/check.rs new file mode 100644 index 00000000000..d0d5c6215bd --- /dev/null +++ b/src/bin/cargo/commands/check.rs @@ -0,0 +1,76 @@ +use crate::command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("check") + // subcommand aliases are handled in aliased_command() + // .alias("c") + .about("Check a local package and all of its dependencies for errors") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_package_spec( + "Package(s) to check", + "Check all packages in the workspace", + "Exclude packages from the check", + ) + .arg_jobs() + .arg_targets_all( + "Check only this package's library", + "Check only the specified binary", + "Check all binaries", + "Check only the specified example", + "Check all examples", + "Check only the specified test target", + "Check all tests", + "Check only the specified bench target", + "Check all benches", + "Check all targets", + ) + .arg_release("Check artifacts in release mode, with optimizations") + .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE")) + .arg_features() + .arg_target_triple("Check for the target triple") + .arg_target_dir() + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +If the `--package` argument is given, then SPEC is a package ID specification +which indicates which package should be built. If it is not given, then the +current package is built. For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are checked if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +Compilation can be configured via the use of profiles which are configured in +the manifest. The default profile for this command is `dev`, but passing +the `--release` flag will use the `release` profile instead. + +The `--profile test` flag can be used to check unit tests with the +`#[cfg(test)]` attribute. 
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let test = match args.value_of("profile") { + Some("test") => true, + None => false, + Some(profile) => { + let err = failure::format_err!( + "unknown profile: `{}`, only `test` is \ + currently supported", + profile + ); + return Err(CliError::new(err, 101)); + } + }; + let mode = CompileMode::Check { test }; + let compile_opts = args.compile_options(config, mode, Some(&ws))?; + + ops::compile(&ws, &compile_opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/clean.rs b/src/bin/cargo/commands/clean.rs new file mode 100644 index 00000000000..e336f73a031 --- /dev/null +++ b/src/bin/cargo/commands/clean.rs @@ -0,0 +1,36 @@ +use crate::command_prelude::*; + +use cargo::ops::{self, CleanOptions}; + +pub fn cli() -> App { + subcommand("clean") + .about("Remove artifacts that cargo has generated in the past") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_package_spec_simple("Package to clean artifacts for") + .arg_manifest_path() + .arg_target_triple("Target triple to clean output for") + .arg_target_dir() + .arg_release("Whether or not to clean release artifacts") + .arg_doc("Whether or not to clean just the documentation directory") + .after_help( + "\ +If the `--package` argument is given, then SPEC is a package ID specification +which indicates which package's artifacts should be cleaned out. If it is not +given, then all packages' artifacts are removed. For more information on SPEC +and its format, see the `cargo help pkgid` command. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let opts = CleanOptions { + config, + spec: values(args, "package"), + target: args.target(), + release: args.is_present("release"), + doc: args.is_present("doc"), + }; + ops::clean(&ws, &opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/clippy.rs b/src/bin/cargo/commands/clippy.rs new file mode 100644 index 00000000000..c591acfacbc --- /dev/null +++ b/src/bin/cargo/commands/clippy.rs @@ -0,0 +1,78 @@ +use crate::command_prelude::*; + +use cargo::ops; +use cargo::util; + +pub fn cli() -> App { + subcommand("clippy-preview") + .about("Checks a package to catch common mistakes and improve your Rust code.") + .arg_package_spec( + "Package(s) to check", + "Check all packages in the workspace", + "Exclude packages from the check", + ) + .arg_jobs() + .arg_targets_all( + "Check only this package's library", + "Check only the specified binary", + "Check all binaries", + "Check only the specified example", + "Check all examples", + "Check only the specified test target", + "Check all tests", + "Check only the specified bench target", + "Check all benches", + "Check all targets", + ) + .arg_release("Check artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Check for the target triple") + .arg_target_dir() + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +If the `--package` argument is given, then SPEC is a package ID specification +which indicates which package should be built. If it is not given, then the +current package is built. For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are checked if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. 
+
+To allow or deny a lint from the command line you can use `cargo clippy --`
+with:
+
+ -W --warn OPT Set lint warnings
+ -A --allow OPT Set lint allowed
+ -D --deny OPT Set lint denied
+ -F --forbid OPT Set lint forbidden
+
+You can use tool lints to allow or deny lints from your code, e.g.:
+
+ #[allow(clippy::needless_lifetimes)]
+",
+ )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+ let ws = args.workspace(config)?;
+
+ let mode = CompileMode::Check { test: false };
+ let mut compile_opts = args.compile_options(config, mode, Some(&ws))?;
+
+ if !config.cli_unstable().unstable_options {
+ return Err(failure::format_err!(
+ "`clippy-preview` is unstable, pass `-Z unstable-options` to enable it"
+ )
+ .into());
+ }
+
+ let wrapper = util::process(util::config::clippy_driver());
+ compile_opts.build_config.primary_unit_rustc = Some(wrapper);
+ compile_opts.build_config.force_rebuild = true;
+
+ ops::compile(&ws, &compile_opts)?;
+ Ok(())
+}
diff --git a/src/bin/cargo/commands/doc.rs b/src/bin/cargo/commands/doc.rs
new file mode 100644
index 00000000000..8405015dfbb
--- /dev/null
+++ b/src/bin/cargo/commands/doc.rs
@@ -0,0 +1,66 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, DocOptions};
+
+pub fn cli() -> App {
+ subcommand("doc")
+ .about("Build a package's documentation")
+ .arg(opt("quiet", "No output printed to stdout").short("q"))
+ .arg(opt(
+ "open",
+ "Opens the docs in a browser after the operation",
+ ))
+ .arg_package_spec(
+ "Package to document",
+ "Document all packages in the workspace",
+ "Exclude packages from the build",
+ )
+ .arg(opt("no-deps", "Don't build documentation for dependencies"))
+ .arg(opt("document-private-items", "Document private items"))
+ .arg_jobs()
+ .arg_targets_lib_bin(
+ "Document only this package's library",
+ "Document only the specified binary",
+ "Document all binaries",
+ )
+ .arg_release("Build artifacts in release mode, with optimizations")
+ .arg_features()
+ .arg_target_triple("Build for the target triple")
+ .arg_target_dir()
+ .arg_manifest_path()
+ .arg_message_format()
+ .after_help(
+ "\
+By default the documentation for the local package and all dependencies is
+built. The output is all placed in `target/doc` in rustdoc's usual format.
+
+All packages in the workspace are documented if the `--all` flag is supplied. The
+`--all` flag is automatically assumed for a virtual manifest.
+Note that `--exclude` has to be specified in conjunction with the `--all` flag.
+
+If the `--package` argument is given, then SPEC is a package ID specification
+which indicates which package should be documented. If it is not given, then the
+current package is documented. For more information on SPEC and its format, see
+the `cargo help pkgid` command.
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let mode = CompileMode::Doc { + deps: !args.is_present("no-deps"), + }; + let mut compile_opts = args.compile_options(config, mode, Some(&ws))?; + compile_opts.local_rustdoc_args = if args.is_present("document-private-items") { + Some(vec!["--document-private-items".to_string()]) + } else { + None + }; + let doc_opts = DocOptions { + open_result: args.is_present("open"), + compile_opts, + }; + ops::doc(&ws, &doc_opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/fetch.rs b/src/bin/cargo/commands/fetch.rs new file mode 100644 index 00000000000..b07367bb65e --- /dev/null +++ b/src/bin/cargo/commands/fetch.rs @@ -0,0 +1,35 @@ +use crate::command_prelude::*; + +use cargo::ops; +use cargo::ops::FetchOptions; + +pub fn cli() -> App { + subcommand("fetch") + .about("Fetch dependencies of a package from the network") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_manifest_path() + .arg_target_triple("Fetch dependencies for the target triple") + .after_help( + "\ +If a lock file is available, this command will ensure that all of the Git +dependencies and/or registries dependencies are downloaded and locally +available. The network is never touched after a `cargo fetch` unless +the lock file changes. + +If the lock file is not available, then this is the equivalent of +`cargo generate-lockfile`. A lock file is generated and dependencies are also +all updated. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + + let opts = FetchOptions { + config, + target: args.target(), + }; + ops::fetch(&ws, &opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/fix.rs b/src/bin/cargo/commands/fix.rs new file mode 100644 index 00000000000..471d05a4c95 --- /dev/null +++ b/src/bin/cargo/commands/fix.rs @@ -0,0 +1,178 @@ +use crate::command_prelude::*; + +use cargo::ops::{self, CompileFilter, FilterRule, LibRule}; + +pub fn cli() -> App { + subcommand("fix") + .about("Automatically fix lint warnings reported by rustc") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_package_spec( + "Package(s) to fix", + "Fix all packages in the workspace", + "Exclude packages from the fixes", + ) + .arg_jobs() + .arg_targets_all( + "Fix only this package's library", + "Fix only the specified binary", + "Fix all binaries", + "Fix only the specified example", + "Fix all examples", + "Fix only the specified test target", + "Fix all tests", + "Fix only the specified bench target", + "Fix all benches", + "Fix all targets (default)", + ) + .arg_release("Fix artifacts in release mode, with optimizations") + .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE")) + .arg_features() + .arg_target_triple("Fix for the target triple") + .arg_target_dir() + .arg_manifest_path() + .arg_message_format() + .arg( + Arg::with_name("broken-code") + .long("broken-code") + .help("Fix code even if it already has compiler errors"), + ) + .arg( + Arg::with_name("edition") + .long("edition") + .help("Fix in preparation for the next edition"), + ) + .arg( + // This is a deprecated argument, we'll want to phase it out + // eventually. 
+ Arg::with_name("prepare-for") + .long("prepare-for") + .help("Fix warnings in preparation of an edition upgrade") + .takes_value(true) + .possible_values(&["2018"]) + .conflicts_with("edition") + .hidden(true), + ) + .arg( + Arg::with_name("idioms") + .long("edition-idioms") + .help("Fix warnings to migrate to the idioms of an edition"), + ) + .arg( + Arg::with_name("allow-no-vcs") + .long("allow-no-vcs") + .help("Fix code even if a VCS was not detected"), + ) + .arg( + Arg::with_name("allow-dirty") + .long("allow-dirty") + .help("Fix code even if the working directory is dirty"), + ) + .arg( + Arg::with_name("allow-staged") + .long("allow-staged") + .help("Fix code even if the working directory has staged changes"), + ) + .arg( + Arg::with_name("clippy") + .long("clippy") + .help("Get fix suggestions from clippy instead of rustc") + .hidden(true) + .multiple(true) + .min_values(0) + .number_of_values(1), + ) + .after_help( + "\ +This Cargo subcommand will automatically take rustc's suggestions from +diagnostics like warnings and apply them to your source code. This is intended +to help automate tasks that rustc itself already knows how to tell you to fix! +The `cargo fix` subcommand is also being developed for the Rust 2018 edition +to provide code the ability to easily opt-in to the new edition without having +to worry about any breakage. + +Executing `cargo fix` will under the hood execute `cargo check`. Any warnings +applicable to your crate will be automatically fixed (if possible) and all +remaining warnings will be displayed when the check process is finished. For +example if you'd like to prepare for the 2018 edition, you can do so by +executing: + + cargo fix --edition + +which behaves the same as `cargo check --all-targets`. Similarly if you'd like +to fix code for different platforms you can do: + + cargo fix --edition --target x86_64-pc-windows-gnu + +or if your crate has optional features: + + cargo fix --edition --no-default-features --features foo + +If you encounter any problems with `cargo fix` or otherwise have any questions +or feature requests please don't hesitate to file an issue at +https://github.com/rust-lang/cargo +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let test = match args.value_of("profile") { + Some("test") => true, + None => false, + Some(profile) => { + let err = failure::format_err!( + "unknown profile: `{}`, only `test` is \ + currently supported", + profile + ); + return Err(CliError::new(err, 101)); + } + }; + let mode = CompileMode::Check { test }; + + // Unlike other commands default `cargo fix` to all targets to fix as much + // code as we can. + let mut opts = args.compile_options(config, mode, Some(&ws))?; + + let use_clippy = args.is_present("clippy"); + + let clippy_args = args + .value_of("clippy") + .map(|s| s.split(' ').map(|s| s.to_string()).collect()) + .or_else(|| Some(vec![])) + .filter(|_| use_clippy); + + if use_clippy && !config.cli_unstable().unstable_options { + return Err(failure::format_err!( + "`cargo fix --clippy` is unstable, pass `-Z unstable-options` to enable it" + ) + .into()); + } + + if let CompileFilter::Default { .. 
} = opts.filter { + opts.filter = CompileFilter::Only { + all_targets: true, + lib: LibRule::Default, + bins: FilterRule::All, + examples: FilterRule::All, + benches: FilterRule::All, + tests: FilterRule::All, + } + } + + ops::fix( + &ws, + &mut ops::FixOptions { + edition: args.is_present("edition"), + prepare_for: args.value_of("prepare-for"), + idioms: args.is_present("idioms"), + compile_opts: opts, + allow_dirty: args.is_present("allow-dirty"), + allow_no_vcs: args.is_present("allow-no-vcs"), + allow_staged: args.is_present("allow-staged"), + broken_code: args.is_present("broken-code"), + clippy_args, + }, + )?; + Ok(()) +} diff --git a/src/bin/cargo/commands/generate_lockfile.rs b/src/bin/cargo/commands/generate_lockfile.rs new file mode 100644 index 00000000000..d18c5668303 --- /dev/null +++ b/src/bin/cargo/commands/generate_lockfile.rs @@ -0,0 +1,16 @@ +use crate::command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("generate-lockfile") + .about("Generate the lockfile for a package") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_manifest_path() +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + ops::generate_lockfile(&ws)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/git_checkout.rs b/src/bin/cargo/commands/git_checkout.rs new file mode 100644 index 00000000000..ccabbb2ede8 --- /dev/null +++ b/src/bin/cargo/commands/git_checkout.rs @@ -0,0 +1,37 @@ +use crate::command_prelude::*; + +use cargo::core::{GitReference, Source, SourceId}; +use cargo::sources::GitSource; +use cargo::util::IntoUrl; + +pub fn cli() -> App { + subcommand("git-checkout") + .about("Checkout a copy of a Git repository") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg( + Arg::with_name("url") + .long("url") + .value_name("URL") + .required(true), + ) + .arg( + Arg::with_name("reference") + .long("reference") + .value_name("REF") + .required(true), + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let url = args.value_of("url").unwrap().into_url()?; + let reference = args.value_of("reference").unwrap(); + + let reference = GitReference::Branch(reference.to_string()); + let source_id = SourceId::for_git(&url, reference)?; + + let mut source = GitSource::new(source_id, config)?; + + source.update()?; + + Ok(()) +} diff --git a/src/bin/cargo/commands/init.rs b/src/bin/cargo/commands/init.rs new file mode 100644 index 00000000000..644cec1f88b --- /dev/null +++ b/src/bin/cargo/commands/init.rs @@ -0,0 +1,21 @@ +use crate::command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("init") + .about("Create a new cargo package in an existing directory") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("path").default_value(".")) + .arg(opt("registry", "Registry to use").value_name("REGISTRY")) + .arg_new_opts() +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let opts = args.new_options(config)?; + ops::init(&opts, config)?; + config + .shell() + .status("Created", format!("{} package", opts.kind))?; + Ok(()) +} diff --git a/src/bin/cargo/commands/install.rs b/src/bin/cargo/commands/install.rs new file mode 100644 index 00000000000..5dd0c49e093 --- /dev/null +++ b/src/bin/cargo/commands/install.rs @@ -0,0 +1,177 @@ +use crate::command_prelude::*; + +use cargo::core::{GitReference, SourceId}; +use cargo::ops; +use cargo::util::IntoUrl; + +pub fn cli() -> App { + 
subcommand("install") + .about("Install a Rust binary. Default location is $HOME/.cargo/bin") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("crate").empty_values(false).multiple(true)) + .arg( + opt("version", "Specify a version to install") + .alias("vers") + .value_name("VERSION") + .requires("crate"), + ) + .arg( + opt("git", "Git URL to install the specified crate from") + .value_name("URL") + .conflicts_with_all(&["path", "registry"]), + ) + .arg( + opt("branch", "Branch to use when installing from git") + .value_name("BRANCH") + .requires("git"), + ) + .arg( + opt("tag", "Tag to use when installing from git") + .value_name("TAG") + .requires("git"), + ) + .arg( + opt("rev", "Specific commit to use when installing from git") + .value_name("SHA") + .requires("git"), + ) + .arg( + opt("path", "Filesystem path to local crate to install") + .value_name("PATH") + .conflicts_with_all(&["git", "registry"]), + ) + .arg(opt( + "list", + "list all installed packages and their versions", + )) + .arg_jobs() + .arg(opt("force", "Force overwriting existing crates or binaries").short("f")) + .arg(opt( + "no-track", + "Do not save tracking information (unstable)", + )) + .arg_features() + .arg(opt("debug", "Build in debug mode instead of release mode")) + .arg_targets_bins_examples( + "Install only the specified binary", + "Install all binaries", + "Install only the specified example", + "Install all examples", + ) + .arg_target_triple("Build for the target triple") + .arg(opt("root", "Directory to install packages into").value_name("DIR")) + .arg( + opt("registry", "Registry to use") + .value_name("REGISTRY") + .requires("crate") + .conflicts_with_all(&["git", "path"]), + ) + .after_help( + "\ +This command manages Cargo's local set of installed binary crates. Only +packages which have executable [[bin]] or [[example]] targets can be +installed, and all executables are installed into the installation root's +`bin` folder. The installation root is determined, in order of precedence, by +`--root`, `$CARGO_INSTALL_ROOT`, the `install.root` configuration key, and +finally the home directory (which is either `$CARGO_HOME` if set or +`$HOME/.cargo` by default). + +There are multiple sources from which a crate can be installed. The default +location is crates.io but the `--git`, `--path`, and `--registry` flags can +change this source. If the source contains more than one package (such as +crates.io or a git repository with multiple crates) the `` argument is +required to indicate which crate should be installed. + +Crates from crates.io can optionally specify the version they wish to install +via the `--version` flags, and similarly packages from git repositories can +optionally specify the branch, tag, or revision that should be installed. If a +crate has multiple binaries, the `--bin` argument can selectively install only +one of them, and if you'd rather install examples the `--example` argument can +be used as well. + +By default cargo will refuse to overwrite existing binaries. The `--force` flag +enables overwriting existing binaries. Thus you can reinstall a crate with +`cargo install --force `. + +Omitting the specification entirely will install the crate in the +current directory. This behaviour is deprecated, and it no longer works in the +Rust 2018 edition. Use the more explicit `install --path .` instead. + +If the source is crates.io or `--git` then by default the crate will be built +in a temporary target directory. 
+To avoid this, the target directory can be
+specified by setting the `CARGO_TARGET_DIR` environment variable to a relative
+path. In particular, this can be useful for caching build artifacts on
+continuous integration systems.",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+    let registry = args.registry(config)?;
+
+    if let Some(path) = args.value_of_path("path", config) {
+        config.reload_rooted_at(path)?;
+    } else {
+        config.reload_rooted_at(config.home().clone().into_path_unlocked())?;
+    }
+
+    let workspace = args.workspace(config).ok();
+    let mut compile_opts = args.compile_options(config, CompileMode::Build, workspace.as_ref())?;
+
+    compile_opts.build_config.release = !args.is_present("debug");
+
+    let krates = args
+        .values_of("crate")
+        .unwrap_or_default()
+        .collect::<Vec<_>>();
+
+    let mut from_cwd = false;
+
+    let source = if let Some(url) = args.value_of("git") {
+        let url = url.into_url()?;
+        let gitref = if let Some(branch) = args.value_of("branch") {
+            GitReference::Branch(branch.to_string())
+        } else if let Some(tag) = args.value_of("tag") {
+            GitReference::Tag(tag.to_string())
+        } else if let Some(rev) = args.value_of("rev") {
+            GitReference::Rev(rev.to_string())
+        } else {
+            GitReference::Branch("master".to_string())
+        };
+        SourceId::for_git(&url, gitref)?
+    } else if let Some(path) = args.value_of_path("path", config) {
+        SourceId::for_path(&path)?
+    } else if krates.is_empty() {
+        from_cwd = true;
+        SourceId::for_path(config.cwd())?
+    } else if let Some(registry) = registry {
+        SourceId::alt_registry(config, &registry)?
+    } else {
+        SourceId::crates_io(config)?
+    };
+
+    let version = args.value_of("version");
+    let root = args.value_of("root");
+
+    if args.is_present("no-track") && !config.cli_unstable().install_upgrade {
+        return Err(failure::format_err!(
+            "`--no-track` flag is unstable, pass `-Z install-upgrade` to enable it"
+        )
+        .into());
+    };
+
+    if args.is_present("list") {
+        ops::install_list(root, config)?;
+    } else {
+        ops::install(
+            root,
+            krates,
+            source,
+            from_cwd,
+            version,
+            &compile_opts,
+            args.is_present("force"),
+            args.is_present("no-track"),
+        )?;
+    }
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/locate_project.rs b/src/bin/cargo/commands/locate_project.rs
new file mode 100644
index 00000000000..a48e387b411
--- /dev/null
+++ b/src/bin/cargo/commands/locate_project.rs
@@ -0,0 +1,35 @@
+use crate::command_prelude::*;
+
+use cargo::print_json;
+use serde::Serialize;
+
+pub fn cli() -> App {
+    subcommand("locate-project")
+        .about("Print a JSON representation of a Cargo.toml file's location")
+        .arg(opt("quiet", "No output printed to stdout").short("q"))
+        .arg_manifest_path()
+}
+
+#[derive(Serialize)]
+pub struct ProjectLocation<'a> {
+    root: &'a str,
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+    let root = args.root_manifest(config)?;
+
+    let root = root
+        .to_str()
+        .ok_or_else(|| {
+            failure::format_err!(
+                "your package path contains characters \
+                 not representable in Unicode"
+            )
+        })
+        .map_err(|e| CliError::new(e, 1))?;
+
+    let location = ProjectLocation { root };
+
+    print_json(&location);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/login.rs b/src/bin/cargo/commands/login.rs
new file mode 100644
index 00000000000..7025433a90b
--- /dev/null
+++ b/src/bin/cargo/commands/login.rs
@@ -0,0 +1,28 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("login")
+        .about(
+            "Save an api token from the registry locally. \
+             If token is not specified, it will be read from stdin.",
+        )
+        .arg(opt("quiet", "No output printed to stdout").short("q"))
+        .arg(Arg::with_name("token"))
+        .arg(
+            opt("host", "Host to set the token for")
+                .value_name("HOST")
+                .hidden(true),
+        )
+        .arg(opt("registry", "Registry to use").value_name("REGISTRY"))
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+    ops::registry_login(
+        config,
+        args.value_of("token").map(String::from),
+        args.value_of("registry").map(String::from),
+    )?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/metadata.rs b/src/bin/cargo/commands/metadata.rs
new file mode 100644
index 00000000000..a6deee86717
--- /dev/null
+++ b/src/bin/cargo/commands/metadata.rs
@@ -0,0 +1,53 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, OutputMetadataOptions};
+use cargo::print_json;
+
+pub fn cli() -> App {
+    subcommand("metadata")
+        .about(
+            "Output the resolved dependencies of a package, \
+             the concrete used versions including overrides, \
+             in machine-readable format",
+        )
+        .arg(opt("quiet", "No output printed to stdout").short("q"))
+        .arg_features()
+        .arg(opt(
+            "no-deps",
+            "Output information only about the root package \
+             and don't fetch dependencies",
+        ))
+        .arg_manifest_path()
+        .arg(
+            opt("format-version", "Format version")
+                .value_name("VERSION")
+                .possible_value("1"),
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+    let ws = args.workspace(config)?;
+
+    let version = match args.value_of("format-version") {
+        None => {
+            config.shell().warn(
+                "please specify `--format-version` flag explicitly \
+                 to avoid compatibility problems",
+            )?;
+            1
+        }
+        Some(version) => version.parse().unwrap(),
+    };
+
+    let options = OutputMetadataOptions {
+        features: values(args, "features"),
+        all_features: args.is_present("all-features"),
+        no_default_features: args.is_present("no-default-features"),
+        no_deps: args.is_present("no-deps"),
+        version,
+    };
+
+    let result = ops::output_metadata(&ws, &options)?;
+    print_json(&result);
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/mod.rs b/src/bin/cargo/commands/mod.rs
new file mode 100644
index 00000000000..16e02774db3
--- /dev/null
+++ b/src/bin/cargo/commands/mod.rs
@@ -0,0 +1,110 @@
+use crate::command_prelude::*;
+
+pub fn builtin() -> Vec<App> {
+    vec![
+        bench::cli(),
+        build::cli(),
+        check::cli(),
+        clean::cli(),
+        clippy::cli(),
+        doc::cli(),
+        fetch::cli(),
+        fix::cli(),
+        generate_lockfile::cli(),
+        git_checkout::cli(),
+        init::cli(),
+        install::cli(),
+        locate_project::cli(),
+        login::cli(),
+        metadata::cli(),
+        new::cli(),
+        owner::cli(),
+        package::cli(),
+        pkgid::cli(),
+        publish::cli(),
+        read_manifest::cli(),
+        run::cli(),
+        rustc::cli(),
+        rustdoc::cli(),
+        search::cli(),
+        test::cli(),
+        uninstall::cli(),
+        update::cli(),
+        vendor::cli(),
+        verify_project::cli(),
+        version::cli(),
+        yank::cli(),
+    ]
+}
+
+pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches<'_>) -> CliResult> {
+    let f = match cmd {
+        "bench" => bench::exec,
+        "build" => build::exec,
+        "check" => check::exec,
+        "clean" => clean::exec,
+        "clippy-preview" => clippy::exec,
+        "doc" => doc::exec,
+        "fetch" => fetch::exec,
+        "fix" => fix::exec,
+        "generate-lockfile" => generate_lockfile::exec,
+        "git-checkout" => git_checkout::exec,
+        "init" => init::exec,
+        "install" => install::exec,
+        "locate-project" => locate_project::exec,
+        "login" => login::exec,
+        "metadata" => metadata::exec,
+        "new" => new::exec,
+        "owner" => owner::exec,
+        "package" => package::exec,
+        "pkgid" => pkgid::exec,
+ "publish" => publish::exec, + "read-manifest" => read_manifest::exec, + "run" => run::exec, + "rustc" => rustc::exec, + "rustdoc" => rustdoc::exec, + "search" => search::exec, + "test" => test::exec, + "uninstall" => uninstall::exec, + "update" => update::exec, + "vendor" => vendor::exec, + "verify-project" => verify_project::exec, + "version" => version::exec, + "yank" => yank::exec, + _ => return None, + }; + Some(f) +} + +pub mod bench; +pub mod build; +pub mod check; +pub mod clean; +pub mod clippy; +pub mod doc; +pub mod fetch; +pub mod fix; +pub mod generate_lockfile; +pub mod git_checkout; +pub mod init; +pub mod install; +pub mod locate_project; +pub mod login; +pub mod metadata; +pub mod new; +pub mod owner; +pub mod package; +pub mod pkgid; +pub mod publish; +pub mod read_manifest; +pub mod run; +pub mod rustc; +pub mod rustdoc; +pub mod search; +pub mod test; +pub mod uninstall; +pub mod update; +pub mod vendor; +pub mod verify_project; +pub mod version; +pub mod yank; diff --git a/src/bin/cargo/commands/new.rs b/src/bin/cargo/commands/new.rs new file mode 100644 index 00000000000..0bb180aab7b --- /dev/null +++ b/src/bin/cargo/commands/new.rs @@ -0,0 +1,29 @@ +use crate::command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("new") + .about("Create a new cargo package at ") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("path").required(true)) + .arg(opt("registry", "Registry to use").value_name("REGISTRY")) + .arg_new_opts() +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let opts = args.new_options(config)?; + + ops::new(&opts, config)?; + let path = args.value_of("path").unwrap(); + let package_name = if let Some(name) = args.value_of("name") { + name + } else { + path + }; + config.shell().status( + "Created", + format!("{} `{}` package", opts.kind, package_name), + )?; + Ok(()) +} diff --git a/src/bin/cargo/commands/owner.rs b/src/bin/cargo/commands/owner.rs new file mode 100644 index 00000000000..e9d5d85213b --- /dev/null +++ b/src/bin/cargo/commands/owner.rs @@ -0,0 +1,58 @@ +use crate::command_prelude::*; + +use cargo::ops::{self, OwnersOptions}; + +pub fn cli() -> App { + subcommand("owner") + .about("Manage the owners of a crate on the registry") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("crate")) + .arg( + multi_opt( + "add", + "LOGIN", + "Name of a user or team to invite as an owner", + ) + .short("a"), + ) + .arg( + multi_opt( + "remove", + "LOGIN", + "Name of a user or team to remove as an owner", + ) + .short("r"), + ) + .arg(opt("list", "List owners of a crate").short("l")) + .arg(opt("index", "Registry index to modify owners for").value_name("INDEX")) + .arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) + .arg(opt("registry", "Registry to use").value_name("REGISTRY")) + .after_help( + "\ +This command will modify the owners for a crate on the specified registry (or +default). Owners of a crate can upload new versions and yank old versions. +Explicitly named owners can also modify the set of owners, so take care! 
+
+ See https://doc.rust-lang.org/cargo/reference/publishing.html#cargo-owner
+ for detailed documentation and troubleshooting.",
+        )
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+    let registry = args.registry(config)?;
+    let opts = OwnersOptions {
+        krate: args.value_of("crate").map(|s| s.to_string()),
+        token: args.value_of("token").map(|s| s.to_string()),
+        index: args.value_of("index").map(|s| s.to_string()),
+        to_add: args
+            .values_of("add")
+            .map(|xs| xs.map(|s| s.to_string()).collect()),
+        to_remove: args
+            .values_of("remove")
+            .map(|xs| xs.map(|s| s.to_string()).collect()),
+        list: args.is_present("list"),
+        registry,
+    };
+    ops::modify_owners(config, &opts)?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/package.rs b/src/bin/cargo/commands/package.rs
new file mode 100644
index 00000000000..772ea21a2c0
--- /dev/null
+++ b/src/bin/cargo/commands/package.rs
@@ -0,0 +1,53 @@
+use crate::command_prelude::*;
+
+use cargo::ops::{self, PackageOpts};
+
+pub fn cli() -> App {
+    subcommand("package")
+        .about("Assemble the local package into a distributable tarball")
+        .arg(opt("quiet", "No output printed to stdout").short("q"))
+        .arg(
+            opt(
+                "list",
+                "Print files included in a package without making one",
+            )
+            .short("l"),
+        )
+        .arg(opt(
+            "no-verify",
+            "Don't verify the contents by building them",
+        ))
+        .arg(opt(
+            "no-metadata",
+            "Ignore warnings about a lack of human-usable metadata",
+        ))
+        .arg(opt(
+            "allow-dirty",
+            "Allow dirty working directories to be packaged",
+        ))
+        .arg_target_triple("Build for the target triple")
+        .arg_target_dir()
+        .arg_features()
+        .arg_manifest_path()
+        .arg_jobs()
+}
+
+pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
+    let ws = args.workspace(config)?;
+    ops::package(
+        &ws,
+        &PackageOpts {
+            config,
+            verify: !args.is_present("no-verify"),
+            list: args.is_present("list"),
+            check_metadata: !args.is_present("no-metadata"),
+            allow_dirty: args.is_present("allow-dirty"),
+            target: args.target(),
+            jobs: args.jobs()?,
+            features: args._values_of("features"),
+            all_features: args.is_present("all-features"),
+            no_default_features: args.is_present("no-default-features"),
+        },
+    )?;
+    Ok(())
+}
diff --git a/src/bin/cargo/commands/pkgid.rs b/src/bin/cargo/commands/pkgid.rs
new file mode 100644
index 00000000000..57be0d11877
--- /dev/null
+++ b/src/bin/cargo/commands/pkgid.rs
@@ -0,0 +1,42 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("pkgid")
+        .about("Print a fully qualified package specification")
+        .arg(opt("quiet", "No output printed to stdout").short("q"))
+        .arg(Arg::with_name("spec"))
+        .arg_package("Argument to get the package ID specifier for")
+        .arg_manifest_path()
+        .after_help(
+            "\
+Given a <spec> argument, print out the fully qualified package ID specifier.
+This command will generate an error if <spec> is ambiguous as to which package
+it refers to in the dependency graph. If no <spec> is given, then the pkgid for
+the local package is printed.
+
+This command requires that a lockfile is available and dependencies have been
+fetched.
+ +Example Package IDs + + pkgid | name | version | url + |-----------------------------|--------|-----------|---------------------| + foo | foo | * | * + foo:1.2.3 | foo | 1.2.3 | * + crates.io/foo | foo | * | *://crates.io/foo + crates.io/foo#1.2.3 | foo | 1.2.3 | *://crates.io/foo + crates.io/bar#foo:1.2.3 | foo | 1.2.3 | *://crates.io/bar + https://crates.io/foo#1.2.3 | foo | 1.2.3 | https://crates.io/foo +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let spec = args.value_of("spec").or_else(|| args.value_of("package")); + let spec = ops::pkgid(&ws, spec)?; + println!("{}", spec); + Ok(()) +} diff --git a/src/bin/cargo/commands/publish.rs b/src/bin/cargo/commands/publish.rs new file mode 100644 index 00000000000..be5dcffddf4 --- /dev/null +++ b/src/bin/cargo/commands/publish.rs @@ -0,0 +1,51 @@ +use crate::command_prelude::*; + +use cargo::ops::{self, PublishOpts}; + +pub fn cli() -> App { + subcommand("publish") + .about("Upload a package to the registry") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_index() + .arg(opt("token", "Token to use when uploading").value_name("TOKEN")) + .arg(opt( + "no-verify", + "Don't verify the contents by building them", + )) + .arg(opt( + "allow-dirty", + "Allow dirty working directories to be packaged", + )) + .arg_target_triple("Build for the target triple") + .arg_target_dir() + .arg_manifest_path() + .arg_features() + .arg_jobs() + .arg_dry_run("Perform all checks without uploading") + .arg(opt("registry", "Registry to publish to").value_name("REGISTRY")) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let registry = args.registry(config)?; + let ws = args.workspace(config)?; + let index = args.index(config)?; + + ops::publish( + &ws, + &PublishOpts { + config, + token: args.value_of("token").map(|s| s.to_string()), + index, + verify: !args.is_present("no-verify"), + allow_dirty: args.is_present("allow-dirty"), + target: args.target(), + jobs: args.jobs()?, + dry_run: args.is_present("dry-run"), + registry, + features: args._values_of("features"), + all_features: args.is_present("all-features"), + no_default_features: args.is_present("no-default-features"), + }, + )?; + Ok(()) +} diff --git a/src/bin/cargo/commands/read_manifest.rs b/src/bin/cargo/commands/read_manifest.rs new file mode 100644 index 00000000000..fe2528b18aa --- /dev/null +++ b/src/bin/cargo/commands/read_manifest.rs @@ -0,0 +1,22 @@ +use crate::command_prelude::*; + +use cargo::print_json; + +pub fn cli() -> App { + subcommand("read-manifest") + .about( + "\ +Print a JSON representation of a Cargo.toml manifest. 
+ +Deprecated, use `cargo metadata --no-deps` instead.\ +", + ) + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_manifest_path() +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + print_json(&ws.current()?); + Ok(()) +} diff --git a/src/bin/cargo/commands/run.rs b/src/bin/cargo/commands/run.rs new file mode 100644 index 00000000000..f30e8570d01 --- /dev/null +++ b/src/bin/cargo/commands/run.rs @@ -0,0 +1,97 @@ +use crate::command_prelude::*; + +use cargo::core::Verbosity; +use cargo::ops::{self, CompileFilter}; + +pub fn cli() -> App { + subcommand("run") + // subcommand aliases are handled in aliased_command() + // .alias("r") + .setting(AppSettings::TrailingVarArg) + .about("Run a binary or example of the local package") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("args").multiple(true)) + .arg_targets_bin_example( + "Name of the bin target to run", + "Name of the example target to run", + ) + .arg_package("Package with the target to run") + .arg_jobs() + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_target_dir() + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +If neither `--bin` nor `--example` are given, then if the package only has one +bin target it will be run. Otherwise `--bin` specifies the bin target to run, +and `--example` specifies the example target to run. At most one of `--bin` or +`--example` can be provided. + +All the arguments following the two dashes (`--`) are passed to the binary to +run. If you're passing arguments to both Cargo and the binary, the ones after +`--` go to the binary, the ones before go to Cargo. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + + let mut compile_opts = args.compile_options(config, CompileMode::Build, Some(&ws))?; + + if !args.is_present("example") && !args.is_present("bin") { + let default_runs: Vec<_> = compile_opts + .spec + .get_packages(&ws)? + .iter() + .filter_map(|pkg| pkg.manifest().default_run()) + .collect(); + if default_runs.len() == 1 { + compile_opts.filter = CompileFilter::from_raw_arguments( + false, + vec![default_runs[0].to_owned()], + false, + vec![], + false, + vec![], + false, + vec![], + false, + false, + ); + } else { + // ops::run will take care of errors if len pkgs != 1. + compile_opts.filter = CompileFilter::Default { + // Force this to false because the code in ops::run is not + // able to pre-check features before compilation starts to + // enforce that only 1 binary is built. + required_features_filterable: false, + }; + } + }; + match ops::run(&ws, &compile_opts, &values_os(args, "args"))? { + None => Ok(()), + Some(err) => { + // If we never actually spawned the process then that sounds pretty + // bad and we always want to forward that up. 
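+            // To make the mapping concrete (values illustrative): a binary
+            // that exits with status 3 yields `exit == Some(3)` and `cargo run`
+            // itself exits with 3, while a process that never spawned carries
+            // no exit status and is surfaced as a full error with code 101.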
+            let exit = match err.exit {
+                Some(exit) => exit,
+                None => return Err(CliError::new(err.into(), 101)),
+            };
+
+            // If `-q` was passed then we suppress extra error information about
+            // a failed process; we assume the process itself printed out enough
+            // information about why it failed, so we don't do so as well.
+            let exit_code = exit.code().unwrap_or(101);
+            let is_quiet = config.shell().verbosity() == Verbosity::Quiet;
+            Err(if is_quiet {
+                CliError::code(exit_code)
+            } else {
+                CliError::new(err.into(), exit_code)
+            })
+        }
+    }
+}
diff --git a/src/bin/cargo/commands/rustc.rs b/src/bin/cargo/commands/rustc.rs
new file mode 100644
index 00000000000..5cb7f94e09b
--- /dev/null
+++ b/src/bin/cargo/commands/rustc.rs
@@ -0,0 +1,75 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("rustc")
+        .setting(AppSettings::TrailingVarArg)
+        .about("Compile a package and all of its dependencies")
+        .arg(opt("quiet", "No output printed to stdout").short("q"))
+        .arg(Arg::with_name("args").multiple(true))
+        .arg_package("Package to build")
+        .arg_jobs()
+        .arg_targets_all(
+            "Build only this package's library",
+            "Build only the specified binary",
+            "Build all binaries",
+            "Build only the specified example",
+            "Build all examples",
+            "Build only the specified test target",
+            "Build all tests",
+            "Build only the specified bench target",
+            "Build all benches",
+            "Build all targets",
+        )
+        .arg_release("Build artifacts in release mode, with optimizations")
+        .arg(opt("profile", "Profile to build the selected target for").value_name("PROFILE"))
+        .arg_features()
+        .arg_target_triple("Target triple which compiles will be for")
+        .arg_target_dir()
+        .arg_manifest_path()
+        .arg_message_format()
+        .after_help(
+            "\
+The specified target for the current package (or package specified by SPEC if
+provided) will be compiled along with all of its dependencies. The specified
+<args>... will all be passed to the final compiler invocation, not any of the
+dependencies. Note that the compiler will still unconditionally receive
+arguments such as -L, --extern, and --crate-type, and the specified <args>...
+will simply be added to the compiler invocation.
+
+This command requires that only one target is being compiled. If more than one
+target is available for the current package the filters of --lib, --bin, etc,
+must be used to select which target is compiled. To pass flags to all compiler
+processes spawned by Cargo, use the $RUSTFLAGS environment variable or the
+`build.rustflags` configuration option.
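+
+For example, to pass `-C lto` only to the final compiler invocation for the
+library target (the flag here is just an illustration):
+
+    cargo rustc --lib -- -C lto
+
+whereas, roughly speaking, `RUSTFLAGS='-C lto' cargo build` hands the flag to
+every compiler process Cargo spawns.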
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let mode = match args.value_of("profile") { + Some("dev") | None => CompileMode::Build, + Some("test") => CompileMode::Test, + Some("bench") => CompileMode::Bench, + Some("check") => CompileMode::Check { test: false }, + Some(mode) => { + let err = failure::format_err!( + "unknown profile: `{}`, use dev, + test, or bench", + mode + ); + return Err(CliError::new(err, 101)); + } + }; + let mut compile_opts = args.compile_options_for_single_package(config, mode, Some(&ws))?; + let target_args = values(args, "args"); + compile_opts.target_rustc_args = if target_args.is_empty() { + None + } else { + Some(target_args) + }; + ops::compile(&ws, &compile_opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/rustdoc.rs b/src/bin/cargo/commands/rustdoc.rs new file mode 100644 index 00000000000..6616dc70e18 --- /dev/null +++ b/src/bin/cargo/commands/rustdoc.rs @@ -0,0 +1,71 @@ +use cargo::ops::{self, DocOptions}; + +use crate::command_prelude::*; + +pub fn cli() -> App { + subcommand("rustdoc") + .setting(AppSettings::TrailingVarArg) + .about("Build a package's documentation, using specified custom flags.") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("args").multiple(true)) + .arg(opt( + "open", + "Opens the docs in a browser after the operation", + )) + .arg_package("Package to document") + .arg_jobs() + .arg_targets_all( + "Build only this package's library", + "Build only the specified binary", + "Build all binaries", + "Build only the specified example", + "Build all examples", + "Build only the specified test target", + "Build all tests", + "Build only the specified bench target", + "Build all benches", + "Build all targets", + ) + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_target_dir() + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +The specified target for the current package (or package specified by SPEC if +provided) will be documented with the specified `...` being passed to the +final rustdoc invocation. Dependencies will not be documented as part of this +command. Note that rustdoc will still unconditionally receive arguments such +as `-L`, `--extern`, and `--crate-type`, and the specified `...` will +simply be added to the rustdoc invocation. + +If the `--package` argument is given, then SPEC is a package ID specification +which indicates which package should be documented. If it is not given, then the +current package is documented. For more information on SPEC and its format, see +the `cargo help pkgid` command. 
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + let mut compile_opts = args.compile_options_for_single_package( + config, + CompileMode::Doc { deps: false }, + Some(&ws), + )?; + let target_args = values(args, "args"); + compile_opts.target_rustdoc_args = if target_args.is_empty() { + None + } else { + Some(target_args) + }; + let doc_opts = DocOptions { + open_result: args.is_present("open"), + compile_opts, + }; + ops::doc(&ws, &doc_opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/search.rs b/src/bin/cargo/commands/search.rs new file mode 100644 index 00000000000..15bab64312a --- /dev/null +++ b/src/bin/cargo/commands/search.rs @@ -0,0 +1,32 @@ +use crate::command_prelude::*; + +use std::cmp::min; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("search") + .about("Search packages in crates.io") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("query").multiple(true)) + .arg_index() + .arg( + opt( + "limit", + "Limit the number of results (default: 10, max: 100)", + ) + .value_name("LIMIT"), + ) + .arg(opt("registry", "Registry to use").value_name("REGISTRY")) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let registry = args.registry(config)?; + let index = args.index(config)?; + let limit = args.value_of_u32("limit")?; + let limit = min(100, limit.unwrap_or(10)); + let query: Vec<&str> = args.values_of("query").unwrap_or_default().collect(); + let query: String = query.join("+"); + ops::search(&query, config, index, limit, registry)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/test.rs b/src/bin/cargo/commands/test.rs new file mode 100644 index 00000000000..06ef20d849c --- /dev/null +++ b/src/bin/cargo/commands/test.rs @@ -0,0 +1,163 @@ +use cargo::ops::{self, CompileFilter, FilterRule, LibRule}; + +use crate::command_prelude::*; + +pub fn cli() -> App { + subcommand("test") + // Subcommand aliases are handled in `aliased_command()`. 
+ // .alias("t") + .setting(AppSettings::TrailingVarArg) + .about("Execute all unit and integration tests and build examples of a local package") + .arg( + Arg::with_name("TESTNAME") + .help("If specified, only run tests containing this string in their names"), + ) + .arg( + Arg::with_name("args") + .help("Arguments for the test binary") + .multiple(true) + .last(true), + ) + .arg( + opt( + "quiet", + "Display one character per test instead of one line", + ) + .short("q"), + ) + .arg_targets_all( + "Test only this package's library unit tests", + "Test only the specified binary", + "Test all binaries", + "Test only the specified example", + "Test all examples", + "Test only the specified test target", + "Test all tests", + "Test only the specified bench target", + "Test all benches", + "Test all targets", + ) + .arg(opt("doc", "Test only this library's documentation")) + .arg(opt("no-run", "Compile, but don't run tests")) + .arg(opt("no-fail-fast", "Run all tests regardless of failure")) + .arg_package_spec( + "Package to run tests for", + "Test all packages in the workspace", + "Exclude packages from the test", + ) + .arg_jobs() + .arg_release("Build artifacts in release mode, with optimizations") + .arg_features() + .arg_target_triple("Build for the target triple") + .arg_target_dir() + .arg_manifest_path() + .arg_message_format() + .after_help( + "\ +The test filtering argument TESTNAME and all the arguments following the +two dashes (`--`) are passed to the test binaries and thus to libtest +(rustc's built in unit-test and micro-benchmarking framework). If you're +passing arguments to both Cargo and the binary, the ones after `--` go to the +binary, the ones before go to Cargo. For details about libtest's arguments see +the output of `cargo test -- --help`. As an example, this will run all +tests with `foo` in their name on 3 threads in parallel: + + cargo test foo -- --test-threads 3 + +If the `--package` argument is given, then SPEC is a package ID specification +which indicates which package should be tested. If it is not given, then the +current package is tested. For more information on SPEC and its format, see the +`cargo help pkgid` command. + +All packages in the workspace are tested if the `--all` flag is supplied. The +`--all` flag is automatically assumed for a virtual manifest. +Note that `--exclude` has to be specified in conjunction with the `--all` flag. + +The `--jobs` argument affects the building of the test executable but does +not affect how many jobs are used when running the tests. The default value +for the `--jobs` argument is the number of CPUs. If you want to control the +number of simultaneous running test cases, pass the `--test-threads` option +to the test binaries: + + cargo test -- --test-threads=1 + +Compilation can be configured via the `test` profile in the manifest. + +By default the rust test harness hides output from test execution to +keep results readable. Test output can be recovered (e.g., for debugging) +by passing `--nocapture` to the test binaries: + + cargo test -- --nocapture + +To get the list of all options available for the test binaries use this: + + cargo test -- --help +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + + let mut compile_opts = args.compile_options(config, CompileMode::Test, Some(&ws))?; + + // `TESTNAME` is actually an argument of the test binary, but it's + // important, so we explicitly mention it and reconfigure. 
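+    // For example, `cargo test foo -- --test-threads 3` assembles the libtest
+    // argument list ["foo", "--test-threads", "3"]: the TESTNAME filter first,
+    // then everything after `--`.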
+    let test_name: Option<&str> = args.value_of("TESTNAME");
+    let test_args = args.value_of("TESTNAME").into_iter();
+    let test_args = test_args.chain(args.values_of("args").unwrap_or_default());
+    let test_args = test_args.collect::<Vec<_>>();
+
+    let no_run = args.is_present("no-run");
+    let doc = args.is_present("doc");
+    if doc {
+        if let CompileFilter::Only { .. } = compile_opts.filter {
+            return Err(CliError::new(
+                failure::format_err!("Can't mix --doc with other target selecting options"),
+                101,
+            ));
+        }
+        if no_run {
+            return Err(CliError::new(
+                failure::format_err!("Can't skip running doc tests with --no-run"),
+                101,
+            ));
+        }
+        compile_opts.build_config.mode = CompileMode::Doctest;
+        compile_opts.filter = ops::CompileFilter::new(
+            LibRule::True,
+            FilterRule::none(),
+            FilterRule::none(),
+            FilterRule::none(),
+            FilterRule::none(),
+        );
+    } else if test_name.is_some() {
+        if let CompileFilter::Default { .. } = compile_opts.filter {
+            compile_opts.filter = ops::CompileFilter::new(
+                LibRule::Default,   // compile the library, so the unit tests can be run filtered
+                FilterRule::All,    // compile the binaries, so the unit tests in binaries can be run filtered
+                FilterRule::All,    // compile the tests, so the integration tests can be run filtered
+                FilterRule::none(), // specify --examples to unit test binaries filtered
+                FilterRule::none(), // specify --benches to unit test benchmarks filtered
+            ); // also, specify --doc to run doc tests filtered
+        }
+    }
+
+    let ops = ops::TestOptions {
+        no_run,
+        no_fail_fast: args.is_present("no-fail-fast"),
+        compile_opts,
+    };
+
+    let err = ops::run_tests(&ws, &ops, &test_args)?;
+    match err {
+        None => Ok(()),
+        Some(err) => Err(match err.exit.as_ref().and_then(|e| e.code()) {
+            Some(i) => CliError::new(
+                failure::format_err!("{}", err.hint(&ws, &ops.compile_opts)),
+                i,
+            ),
+            None => CliError::new(err.into(), 101),
+        }),
+    }
+}
diff --git a/src/bin/cargo/commands/uninstall.rs b/src/bin/cargo/commands/uninstall.rs
new file mode 100644
index 00000000000..4756da1cbbb
--- /dev/null
+++ b/src/bin/cargo/commands/uninstall.rs
@@ -0,0 +1,31 @@
+use crate::command_prelude::*;
+
+use cargo::ops;
+
+pub fn cli() -> App {
+    subcommand("uninstall")
+        .about("Remove a Rust binary")
+        .arg(opt("quiet", "No output printed to stdout").short("q"))
+        .arg(Arg::with_name("spec").multiple(true))
+        .arg_package_spec_simple("Package to uninstall")
+        .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME"))
+        .arg(opt("root", "Directory to uninstall packages from").value_name("DIR"))
+        .after_help(
+            "\
+The argument SPEC is a package ID specification (see `cargo help pkgid`) to
+specify which crate should be uninstalled. By default all binaries are
+uninstalled for a crate but the `--bin` and `--example` flags can be used to
+only uninstall particular binaries.
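+
+For example, to remove a single binary of a package that installed several
+(names are hypothetical):
+
+    cargo uninstall my-tool --bin my-helper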
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let root = args.value_of("root"); + let specs = args + .values_of("spec") + .unwrap_or_else(|| args.values_of("package").unwrap_or_default()) + .collect(); + ops::uninstall(root, specs, &values(args, "bin"), config)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/update.rs b/src/bin/cargo/commands/update.rs new file mode 100644 index 00000000000..3ffadcefcbe --- /dev/null +++ b/src/bin/cargo/commands/update.rs @@ -0,0 +1,54 @@ +use crate::command_prelude::*; + +use cargo::ops::{self, UpdateOptions}; + +pub fn cli() -> App { + subcommand("update") + .about("Update dependencies as recorded in the local lock file") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_package_spec_simple("Package to update") + .arg(opt( + "aggressive", + "Force updating all dependencies of as well", + )) + .arg_dry_run("Don't actually write the lockfile") + .arg(opt("precise", "Update a single dependency to exactly PRECISE").value_name("PRECISE")) + .arg_manifest_path() + .after_help( + "\ +This command requires that a `Cargo.lock` already exists as generated by +`cargo build` or related commands. + +If SPEC is given, then a conservative update of the lockfile will be +performed. This means that only the dependency specified by SPEC will be +updated. Its transitive dependencies will be updated only if SPEC cannot be +updated without updating dependencies. All other dependencies will remain +locked at their currently recorded versions. + +If PRECISE is specified, then `--aggressive` must not also be specified. The +argument PRECISE is a string representing a precise revision that the package +being updated should be updated to. For example, if the package comes from a git +repository, then PRECISE would be the exact revision that the repository should +be updated to. + +If SPEC is not given, then all dependencies will be re-resolved and +updated. + +For more information about package ID specifications, see `cargo help pkgid`. 
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let ws = args.workspace(config)?; + + let update_opts = UpdateOptions { + aggressive: args.is_present("aggressive"), + precise: args.value_of("precise"), + to_update: values(args, "package"), + dry_run: args.is_present("dry-run"), + config, + }; + ops::update_lockfile(&ws, &update_opts)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/vendor.rs b/src/bin/cargo/commands/vendor.rs new file mode 100644 index 00000000000..da0e5838562 --- /dev/null +++ b/src/bin/cargo/commands/vendor.rs @@ -0,0 +1,127 @@ +use crate::command_prelude::*; +use cargo::ops; +use std::path::PathBuf; + +pub fn cli() -> App { + subcommand("vendor") + .about("Vendor all dependencies for a project locally") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_manifest_path() + .arg(Arg::with_name("path").help("Where to vendor crates (`vendor` by default)")) + .arg( + Arg::with_name("no-delete") + .long("no-delete") + .help("Don't delete older crates in the vendor directory"), + ) + .arg( + Arg::with_name("tomls") + .short("s") + .long("sync") + .help("Additional `Cargo.toml` to sync and vendor") + .value_name("TOML") + .multiple(true), + ) + .arg( + Arg::with_name("respect-source-config") + .long("respect-source-config") + .help("Respect `[source]` config in `.cargo/config`") + .multiple(true), + ) + .arg( + Arg::with_name("no-merge-sources") + .long("no-merge-sources") + .hidden(true), + ) + .arg( + Arg::with_name("relative-path") + .long("relative-path") + .hidden(true), + ) + .arg( + Arg::with_name("only-git-deps") + .long("only-git-deps") + .hidden(true), + ) + .arg( + Arg::with_name("explicit-version") + .short("-x") + .long("explicit-version") + .hidden(true), + ) + .arg( + Arg::with_name("disallow-duplicates") + .long("disallow-duplicates") + .hidden(true), + ) + .after_help( + "\ +This cargo subcommand will vendor all crates.io and git dependencies for a +project into the specified directory at ``. After this command completes +the vendor directory specified by `` will contain all remote sources from +dependencies specified. Additional manifests beyond the default one can be +specified with the `-s` option. + +The `cargo vendor` command will also print out the configuration necessary +to use the vendored sources, which you will need to add to `.cargo/config`. +", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + // We're doing the vendoring operation outselves, so we don't actually want + // to respect any of the `source` configuration in Cargo itself. That's + // intended for other consumers of Cargo, but we want to go straight to the + // source, e.g. crates.io, to fetch crates. + if !args.is_present("respect-source-config") { + config.values_mut()?.remove("source"); + } + + // When we moved `cargo vendor` into Cargo itself we didn't stabilize a few + // flags, so try to provide a helpful error message in that case to enusre + // that users currently using the flag aren't tripped up. 
+ let crates_io_cargo_vendor_flag = if args.is_present("no-merge-sources") { + Some("--no-merge-sources") + } else if args.is_present("relative-path") { + Some("--relative-path") + } else if args.is_present("only-git-deps") { + Some("--only-git-deps") + } else if args.is_present("explicit-version") { + Some("--explicit-version") + } else if args.is_present("disallow-duplicates") { + Some("--disallow-duplicates") + } else { + None + }; + if let Some(flag) = crates_io_cargo_vendor_flag { + return Err(failure::format_err!( + "\ +the crates.io `cargo vendor` command has now been merged into Cargo itself +and does not support the flag `{}` currently; to continue using the flag you +can execute `cargo-vendor vendor ...`, and if you would like to see this flag +supported in Cargo itself please feel free to file an issue at +https://github.com/rust-lang/cargo/issues/new +", + flag + ) + .into()); + } + + let ws = args.workspace(config)?; + let path = args + .value_of_os("path") + .map(|val| PathBuf::from(val.to_os_string())) + .unwrap_or_else(|| PathBuf::from("vendor")); + ops::vendor( + &ws, + &ops::VendorOptions { + no_delete: args.is_present("no-delete"), + destination: &path, + extra: args + .values_of_os("tomls") + .unwrap_or_default() + .map(|s| PathBuf::from(s.to_os_string())) + .collect(), + }, + )?; + Ok(()) +} diff --git a/src/bin/cargo/commands/verify_project.rs b/src/bin/cargo/commands/verify_project.rs new file mode 100644 index 00000000000..fe2b42aebed --- /dev/null +++ b/src/bin/cargo/commands/verify_project.rs @@ -0,0 +1,31 @@ +use crate::command_prelude::*; + +use std::collections::HashMap; +use std::process; + +use cargo::print_json; + +pub fn cli() -> App { + subcommand("verify-project") + .about("Check correctness of crate manifest") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg_manifest_path() +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + fn fail(reason: &str, value: &str) -> ! 
{ + let mut h = HashMap::new(); + h.insert(reason.to_string(), value.to_string()); + print_json(&h); + process::exit(1) + } + + if let Err(e) = args.workspace(config) { + fail("invalid", &e.to_string()) + } + + let mut h = HashMap::new(); + h.insert("success".to_string(), "true".to_string()); + print_json(&h); + Ok(()) +} diff --git a/src/bin/cargo/commands/version.rs b/src/bin/cargo/commands/version.rs new file mode 100644 index 00000000000..81c6838e7ab --- /dev/null +++ b/src/bin/cargo/commands/version.rs @@ -0,0 +1,16 @@ +use crate::command_prelude::*; + +use crate::cli; + +pub fn cli() -> App { + subcommand("version") + .about("Show version information") + .arg(opt("quiet", "No output printed to stdout").short("q")) +} + +pub fn exec(_config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let verbose = args.occurrences_of("verbose") > 0; + let version = cli::get_version_string(verbose); + print!("{}", version); + Ok(()) +} diff --git a/src/bin/cargo/commands/yank.rs b/src/bin/cargo/commands/yank.rs new file mode 100644 index 00000000000..3079c544fb9 --- /dev/null +++ b/src/bin/cargo/commands/yank.rs @@ -0,0 +1,44 @@ +use crate::command_prelude::*; + +use cargo::ops; + +pub fn cli() -> App { + subcommand("yank") + .about("Remove a pushed crate from the index") + .arg(opt("quiet", "No output printed to stdout").short("q")) + .arg(Arg::with_name("crate")) + .arg(opt("vers", "The version to yank or un-yank").value_name("VERSION")) + .arg(opt( + "undo", + "Undo a yank, putting a version back into the index", + )) + .arg(opt("index", "Registry index to yank from").value_name("INDEX")) + .arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) + .arg(opt("registry", "Registry to use").value_name("REGISTRY")) + .after_help( + "\ +The yank command removes a previously pushed crate's version from the server's +index. This command does not delete any data, and the crate will still be +available for download via the registry's download link. + +Note that existing crates locked to a yanked version will still be able to +download the yanked version to use it. Cargo will, however, not allow any new +crates to be locked to any yanked version. 
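+
+A typical session looks like this (the version number is illustrative):
+
+    cargo yank --vers 1.0.1
+    cargo yank --vers 1.0.1 --undo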
+", + ) +} + +pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { + let registry = args.registry(config)?; + + ops::yank( + config, + args.value_of("crate").map(|s| s.to_string()), + args.value_of("vers").map(|s| s.to_string()), + args.value_of("token").map(|s| s.to_string()), + args.value_of("index").map(|s| s.to_string()), + args.is_present("undo"), + registry, + )?; + Ok(()) +} diff --git a/src/bin/cargo/main.rs b/src/bin/cargo/main.rs new file mode 100644 index 00000000000..9ed135cf123 --- /dev/null +++ b/src/bin/cargo/main.rs @@ -0,0 +1,200 @@ +#![warn(rust_2018_idioms)] // while we're getting used to 2018 +#![allow(clippy::too_many_arguments)] // large project +#![allow(clippy::redundant_closure)] // there's a false positive +#![warn(clippy::needless_borrow)] +#![warn(clippy::redundant_clone)] + +use std::collections::BTreeSet; +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; + +use cargo::core::shell::Shell; +use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config}; +use cargo::util::{CliError, ProcessError}; + +mod cli; +mod commands; + +use crate::command_prelude::*; + +fn main() { + #[cfg(feature = "pretty-env-logger")] + pretty_env_logger::init_custom_env("CARGO_LOG"); + #[cfg(not(feature = "pretty-env-logger"))] + env_logger::init_from_env("CARGO_LOG"); + cargo::core::maybe_allow_nightly_features(); + + let mut config = match Config::default() { + Ok(cfg) => cfg, + Err(e) => { + let mut shell = Shell::new(); + cargo::exit_with_error(e.into(), &mut shell) + } + }; + + let result = match cargo::ops::fix_maybe_exec_rustc() { + Ok(true) => Ok(()), + Ok(false) => { + init_git_transports(&config); + let _token = cargo::util::job::setup(); + cli::main(&mut config) + } + Err(e) => Err(CliError::from(e)), + }; + + match result { + Err(e) => cargo::exit_with_error(e, &mut *config.shell()), + Ok(()) => {} + } +} + +fn aliased_command(config: &Config, command: &str) -> CargoResult>> { + let alias_name = format!("alias.{}", command); + let user_alias = match config.get_string(&alias_name) { + Ok(Some(record)) => Some( + record + .val + .split_whitespace() + .map(|s| s.to_string()) + .collect(), + ), + Ok(None) => None, + Err(_) => config + .get_list(&alias_name)? 
+ .map(|record| record.val.iter().map(|s| s.0.to_string()).collect()), + }; + let result = user_alias.or_else(|| match command { + "b" => Some(vec!["build".to_string()]), + "c" => Some(vec!["check".to_string()]), + "r" => Some(vec!["run".to_string()]), + "t" => Some(vec!["test".to_string()]), + _ => None, + }); + Ok(result) +} + +/// List all runnable commands +fn list_commands(config: &Config) -> BTreeSet { + let prefix = "cargo-"; + let suffix = env::consts::EXE_SUFFIX; + let mut commands = BTreeSet::new(); + for dir in search_directories(config) { + let entries = match fs::read_dir(dir) { + Ok(entries) => entries, + _ => continue, + }; + for entry in entries.filter_map(|e| e.ok()) { + let path = entry.path(); + let filename = match path.file_name().and_then(|s| s.to_str()) { + Some(filename) => filename, + _ => continue, + }; + if !filename.starts_with(prefix) || !filename.ends_with(suffix) { + continue; + } + if is_executable(entry.path()) { + let end = filename.len() - suffix.len(); + commands.insert(CommandInfo::External { + name: filename[prefix.len()..end].to_string(), + path: path.clone(), + }); + } + } + } + + for cmd in commands::builtin() { + commands.insert(CommandInfo::BuiltIn { + name: cmd.get_name().to_string(), + about: cmd.p.meta.about.map(|s| s.to_string()), + }); + } + + commands +} + +fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult { + let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX); + let path = search_directories(config) + .iter() + .map(|dir| dir.join(&command_exe)) + .find(|file| is_executable(file)); + let command = match path { + Some(command) => command, + None => { + let cmds = list_commands(config); + let did_you_mean = closest_msg(cmd, cmds.iter(), |c| c.name()); + let err = failure::format_err!("no such subcommand: `{}`{}", cmd, did_you_mean); + return Err(CliError::new(err, 101)); + } + }; + + let cargo_exe = config.cargo_exe()?; + let err = match util::process(&command) + .env(cargo::CARGO_ENV, cargo_exe) + .args(args) + .exec_replace() + { + Ok(()) => return Ok(()), + Err(e) => e, + }; + + if let Some(perr) = err.downcast_ref::() { + if let Some(code) = perr.exit.as_ref().and_then(|c| c.code()) { + return Err(CliError::code(code)); + } + } + Err(CliError::new(err, 101)) +} + +#[cfg(unix)] +fn is_executable>(path: P) -> bool { + use std::os::unix::prelude::*; + fs::metadata(path) + .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0) + .unwrap_or(false) +} +#[cfg(windows)] +fn is_executable>(path: P) -> bool { + fs::metadata(path) + .map(|metadata| metadata.is_file()) + .unwrap_or(false) +} + +fn search_directories(config: &Config) -> Vec { + let mut dirs = vec![config.home().clone().into_path_unlocked().join("bin")]; + if let Some(val) = env::var_os("PATH") { + dirs.extend(env::split_paths(&val)); + } + dirs +} + +fn init_git_transports(config: &Config) { + // Only use a custom transport if any HTTP options are specified, + // such as proxies or custom certificate authorities. The custom + // transport, however, is not as well battle-tested. + + match cargo::ops::needs_custom_http_transport(config) { + Ok(true) => {} + _ => return, + } + + let handle = match cargo::ops::http_handle(config) { + Ok(handle) => handle, + Err(..) => return, + }; + + // The unsafety of the registration function derives from two aspects: + // + // 1. This call must be synchronized with all other registration calls as + // well as construction of new transports. + // 2. 
The argument is leaked. + // + // We're clear on point (1) because this is only called at the start of this + // binary (we know what the state of the world looks like) and we're mostly + // clear on point (2) because we'd only free it after everything is done + // anyway + unsafe { + git2_curl::register(handle); + } +} diff --git a/src/bin/clean.rs b/src/bin/clean.rs deleted file mode 100644 index 18c6111e56e..00000000000 --- a/src/bin/clean.rs +++ /dev/null @@ -1,52 +0,0 @@ -use std::env; - -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; - -#[derive(RustcDecodable)] -struct Options { - flag_package: Option, - flag_target: Option, - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Remove artifacts that cargo has generated in the past - -Usage: - cargo clean [options] - -Options: - -h, --help Print this message - -p SPEC, --package SPEC Package to clean artifacts for - --manifest-path PATH Path to the manifest to the package to clean - --target TRIPLE Target triple to clean output for (default all) - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -If the --package argument is given, then SPEC is a package id specification -which indicates which package's artifacts should be cleaned out. If it is not -given, then all packages' artifacts are removed. For more information on SPEC -and its format, see the `cargo help pkgid` command. -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - debug!("executing; cmd=cargo-clean; args={:?}", env::args().collect::>()); - - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - let opts = ops::CleanOptions { - config: config, - spec: options.flag_package.as_ref().map(|s| &s[..]), - target: options.flag_target.as_ref().map(|s| &s[..]), - }; - ops::clean(&root, &opts).map(|_| None).map_err(|err| { - CliError::from_boxed(err, 101) - }) -} diff --git a/src/bin/doc.rs b/src/bin/doc.rs deleted file mode 100644 index 6bc43a86b04..00000000000 --- a/src/bin/doc.rs +++ /dev/null @@ -1,80 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; - -#[derive(RustcDecodable)] -struct Options { - flag_target: Option, - flag_features: Vec, - flag_jobs: Option, - flag_manifest_path: Option, - flag_no_default_features: bool, - flag_no_deps: bool, - flag_open: bool, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_package: Option, -} - -pub const USAGE: &'static str = " -Build a package's documentation - -Usage: - cargo doc [options] - -Options: - -h, --help Print this message - --open Opens the docs in a browser after the operation - -p SPEC, --package SPEC Package to document - --no-deps Don't build documentation for dependencies - -j N, --jobs N The number of jobs to run in parallel - --features FEATURES Space-separated list of features to also build - --no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to document - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never 
- -By default the documentation for the local package and all dependencies is -built. The output is all placed in `target/doc` in rustdoc's usual format. - -If the --package argument is given, then SPEC is a package id specification -which indicates which package should be documented. If it is not given, then the -current package is documented. For more information on SPEC and its format, see -the `cargo help pkgid` command. -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - - let mut doc_opts = ops::DocOptions { - open_result: options.flag_open, - compile_opts: ops::CompileOptions { - config: config, - jobs: options.flag_jobs, - target: options.flag_target.as_ref().map(|t| &t[..]), - features: &options.flag_features, - no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), - exec_engine: None, - filter: ops::CompileFilter::Everything, - release: false, - mode: ops::CompileMode::Doc { - deps: !options.flag_no_deps, - }, - target_rustc_args: None, - }, - }; - - try!(ops::doc(&root, &mut doc_opts).map_err(|err| { - CliError::from_boxed(err, 101) - })); - - Ok(None) -} - diff --git a/src/bin/fetch.rs b/src/bin/fetch.rs deleted file mode 100644 index 94087f026c3..00000000000 --- a/src/bin/fetch.rs +++ /dev/null @@ -1,46 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::find_root_manifest_for_cwd; - -#[derive(RustcDecodable)] -struct Options { - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Fetch dependencies of a package from the network. - -Usage: - cargo fetch [options] - -Options: - -h, --help Print this message - --manifest-path PATH Path to the manifest to fetch dependencies for - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -If a lockfile is available, this command will ensure that all of the git -dependencies and/or registries dependencies are downloaded and locally -available. The network is never touched after a `cargo fetch` unless -the lockfile changes. - -If the lockfile is not available, then this is the equivalent of -`cargo generate-lockfile`. A lockfile is generated and dependencies are also -all updated. 
-"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - try!(ops::fetch(&root, config).map_err(|e| { - CliError::from_boxed(e, 101) - })); - Ok(None) -} - - diff --git a/src/bin/generate_lockfile.rs b/src/bin/generate_lockfile.rs deleted file mode 100644 index 29891f1c846..00000000000 --- a/src/bin/generate_lockfile.rs +++ /dev/null @@ -1,37 +0,0 @@ -use std::env; - -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::find_root_manifest_for_cwd; - -#[derive(RustcDecodable)] -struct Options { - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Generate the lockfile for a project - -Usage: - cargo generate-lockfile [options] - -Options: - -h, --help Print this message - --manifest-path PATH Path to the manifest to generate a lockfile for - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - debug!("executing; cmd=cargo-generate-lockfile; args={:?}", env::args().collect::>()); - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - - ops::generate_lockfile(&root, config) - .map(|_| None).map_err(|err| CliError::from_boxed(err, 101)) -} diff --git a/src/bin/git_checkout.rs b/src/bin/git_checkout.rs deleted file mode 100644 index ff4d5f4c46c..00000000000 --- a/src/bin/git_checkout.rs +++ /dev/null @@ -1,47 +0,0 @@ -use cargo::core::source::{Source, SourceId, GitReference}; -use cargo::sources::git::{GitSource}; -use cargo::util::{Config, CliResult, CliError, human, ToUrl}; - -#[derive(RustcDecodable)] -struct Options { - flag_url: String, - flag_reference: String, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Usage: - cargo git-checkout [options] --url=URL --reference=REF - cargo git-checkout -h | --help - -Options: - -h, --help Print this message - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let Options { flag_url: url, flag_reference: reference, .. 
} = options; - - let url = try!(url.to_url().map_err(|e| { - human(format!("The URL `{}` you passed was \ - not a valid URL: {}", url, e)) - }) - .map_err(|e| CliError::from_boxed(e, 1))); - - let reference = GitReference::Branch(reference.to_string()); - let source_id = SourceId::for_git(&url, reference); - - let mut source = GitSource::new(&source_id, config); - - try!(source.update().map_err(|e| { - CliError::new(&format!("Couldn't update {:?}: {:?}", source, e), 1) - })); - - Ok(None) -} diff --git a/src/bin/help.rs b/src/bin/help.rs deleted file mode 100644 index f07a1cc669a..00000000000 --- a/src/bin/help.rs +++ /dev/null @@ -1,22 +0,0 @@ -use cargo::util::{CliResult, CliError, Config}; - -#[derive(RustcDecodable)] -struct Options; - -pub const USAGE: &'static str = " -Get some help with a cargo command. - -Usage: - cargo help - cargo help -h | --help - -Options: - -h, --help Print this message -"; - -pub fn execute(_: Options, _: &Config) -> CliResult> { - // This is a dummy command just so that `cargo help help` works. - // The actual delegation of help flag to subcommands is handled by the - // cargo command. - Err(CliError::new("Help command should not be executed directly.", 101)) -} diff --git a/src/bin/locate_project.rs b/src/bin/locate_project.rs deleted file mode 100644 index 4c870ba4de2..00000000000 --- a/src/bin/locate_project.rs +++ /dev/null @@ -1,34 +0,0 @@ -use cargo::util::{CliResult, CliError, human, ChainError, Config}; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; - -#[derive(RustcDecodable)] -struct LocateProjectFlags { - flag_manifest_path: Option, -} - -pub const USAGE: &'static str = " -Usage: - cargo locate-project [options] - -Options: - --manifest-path PATH Path to the manifest to build benchmarks for - -h, --help Print this message -"; - -#[derive(RustcEncodable)] -struct ProjectLocation { - root: String -} - -pub fn execute(flags: LocateProjectFlags, - _: &Config) -> CliResult> { - let root = try!(find_root_manifest_for_cwd(flags.flag_manifest_path)); - - let string = try!(root.to_str() - .chain_error(|| human("Your project path contains \ - characters not representable in \ - Unicode")) - .map_err(|e| CliError::from_boxed(e, 1))); - - Ok(Some(ProjectLocation { root: string.to_string() })) -} diff --git a/src/bin/login.rs b/src/bin/login.rs deleted file mode 100644 index bfcc2f8ae3d..00000000000 --- a/src/bin/login.rs +++ /dev/null @@ -1,63 +0,0 @@ -use std::io::prelude::*; -use std::io; - -use cargo::ops; -use cargo::core::{SourceId, Source}; -use cargo::sources::RegistrySource; -use cargo::util::{CliResult, CliError, Config}; - -#[derive(RustcDecodable)] -struct Options { - flag_host: Option, - arg_token: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Save an api token from the registry locally - -Usage: - cargo login [options] [] - -Options: - -h, --help Print this message - --host HOST Host to set the token for - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let token = match options.arg_token.clone() { - Some(token) => token, - None => { - let err = (|| { - let src = try!(SourceId::for_central(config)); - let mut src = RegistrySource::new(&src, config); - 
try!(src.update()); - let config = try!(src.config()); - let host = options.flag_host.clone().unwrap_or(config.api); - println!("please visit {}me and paste the API Token below", - host); - let mut line = String::new(); - let input = io::stdin(); - try!(input.lock().read_line(&mut line)); - Ok(line) - })(); - - try!(err.map_err(|e| CliError::from_boxed(e, 101))) - } - }; - - let token = token.trim().to_string(); - try!(ops::registry_login(config, token).map_err(|e| { - CliError::from_boxed(e, 101) - })); - Ok(None) -} - diff --git a/src/bin/new.rs b/src/bin/new.rs deleted file mode 100644 index ee53ebe8cb8..00000000000 --- a/src/bin/new.rs +++ /dev/null @@ -1,55 +0,0 @@ -use std::env; - -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; - -#[derive(RustcDecodable)] -struct Options { - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_bin: bool, - arg_path: String, - flag_name: Option, - flag_vcs: Option, -} - -pub const USAGE: &'static str = " -Create a new cargo package at - -Usage: - cargo new [options] - cargo new -h | --help - -Options: - -h, --help Print this message - --vcs VCS Initialize a new repository for the given version - control system (git or hg) or do not initialize any version - control at all (none) overriding a global configuration. - --bin Use a binary instead of a library template - --name NAME Set the resulting package name - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - debug!("executing; cmd=cargo-new; args={:?}", env::args().collect::>()); - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - - let Options { flag_bin, arg_path, flag_name, flag_vcs, .. } = options; - - let opts = ops::NewOptions { - version_control: flag_vcs, - bin: flag_bin, - path: &arg_path, - name: flag_name.as_ref().map(|s| s.as_ref()), - }; - - ops::new(opts, config).map(|_| None).map_err(|err| { - CliError::from_boxed(err, 101) - }) -} - - diff --git a/src/bin/owner.rs b/src/bin/owner.rs deleted file mode 100644 index 38bb4ae41c6..00000000000 --- a/src/bin/owner.rs +++ /dev/null @@ -1,60 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; - -#[derive(RustcDecodable)] -struct Options { - arg_crate: Option, - flag_token: Option, - flag_add: Option>, - flag_remove: Option>, - flag_index: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_list: bool, -} - -pub const USAGE: &'static str = " -Manage the owners of a crate on the registry - -Usage: - cargo owner [options] [] - -Options: - -h, --help Print this message - -a, --add LOGIN Name of a user or team to add as an owner - -r, --remove LOGIN Name of a user or team to remove as an owner - -l, --list List owners of a crate - --index INDEX Registry index to modify owners for - --token TOKEN API token to use when authenticating - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -This command will modify the owners for a package on the specified registry (or -default). Note that owners of a package can upload new versions, yank old -versions. Explicitly named owners can also modify the set of owners, so take -caution! - -See http://doc.crates.io/crates-io.html#cargo-owner for detailed documentation -and troubleshooting. 
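For illustration, the two common owner operations scripted from Rust; the crate name `foo` and the login `some-user` are placeholders, and both calls assume a token saved earlier with `cargo login`:

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // List the current owners of a crate.
    Command::new("cargo")
        .args(["owner", "--list", "foo"])
        .status()?;

    // Add a user (or a team, via the `github:org:team` syntax) as an
    // additional owner.
    Command::new("cargo")
        .args(["owner", "--add", "some-user", "foo"])
        .status()?;
    Ok(())
}
```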
-"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let opts = ops::OwnersOptions { - krate: options.arg_crate, - token: options.flag_token, - index: options.flag_index, - to_add: options.flag_add, - to_remove: options.flag_remove, - list: options.flag_list, - }; - try!(ops::modify_owners(config, &opts).map_err(|e| { - CliError::from_boxed(e, 101) - })); - Ok(None) -} - - diff --git a/src/bin/package.rs b/src/bin/package.rs deleted file mode 100644 index 4d3b72b716a..00000000000 --- a/src/bin/package.rs +++ /dev/null @@ -1,44 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::find_root_manifest_for_cwd; - -#[derive(RustcDecodable)] -struct Options { - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_manifest_path: Option, - flag_no_verify: bool, - flag_no_metadata: bool, - flag_list: bool, -} - -pub const USAGE: &'static str = " -Assemble the local package into a distributable tarball - -Usage: - cargo package [options] - -Options: - -h, --help Print this message - -l, --list Print files included in a package without making one - --no-verify Don't verify the contents by building them - --no-metadata Ignore warnings about a lack of human-usable metadata - --manifest-path PATH Path to the manifest to compile - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - ops::package(&root, config, - !options.flag_no_verify, - options.flag_list, - !options.flag_no_metadata).map(|_| None).map_err(|err| { - CliError::from_boxed(err, 101) - }) -} diff --git a/src/bin/pkgid.rs b/src/bin/pkgid.rs deleted file mode 100644 index abbe4e52fc7..00000000000 --- a/src/bin/pkgid.rs +++ /dev/null @@ -1,61 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; - -#[derive(RustcDecodable)] -struct Options { - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_manifest_path: Option, - arg_spec: Option, -} - -pub const USAGE: &'static str = " -Print a fully qualified package specification - -Usage: - cargo pkgid [options] [] - -Options: - -h, --help Print this message - --manifest-path PATH Path to the manifest to the package to clean - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -Given a argument, print out the fully qualified package id specifier. -This command will generate an error if is ambiguous as to which package -it refers to in the dependency graph. If no is given, then the pkgid for -the local package is printed. - -This command requires that a lockfile is available and dependencies have been -fetched. 
- -Example Package IDs - - pkgid | name | version | url - |-----------------------------|--------|-----------|---------------------| - foo | foo | * | * - foo:1.2.3 | foo | 1.2.3 | * - crates.io/foo | foo | * | *://crates.io/foo - crates.io/foo#1.2.3 | foo | 1.2.3 | *://crates.io/foo - crates.io/bar#foo:1.2.3 | foo | 1.2.3 | *://crates.io/bar - http://crates.io/foo#1.2.3 | foo | 1.2.3 | http://crates.io/foo - -"; - -pub fn execute(options: Options, - config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path.clone())); - - let spec = options.arg_spec.as_ref().map(|s| &s[..]); - let spec = try!(ops::pkgid(&root, spec, config).map_err(|err| { - CliError::from_boxed(err, 101) - })); - println!("{}", spec); - Ok(None) -} - diff --git a/src/bin/publish.rs b/src/bin/publish.rs deleted file mode 100644 index 387daecd235..00000000000 --- a/src/bin/publish.rs +++ /dev/null @@ -1,49 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::find_root_manifest_for_cwd; - -#[derive(RustcDecodable)] -struct Options { - flag_host: Option, - flag_token: Option, - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_no_verify: bool, -} - -pub const USAGE: &'static str = " -Upload a package to the registry - -Usage: - cargo publish [options] - -Options: - -h, --help Print this message - --host HOST Host to upload the package to - --token TOKEN Token to use when uploading - --no-verify Don't verify package tarball before publish - --manifest-path PATH Path to the manifest to compile - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let Options { - flag_token: token, - flag_host: host, - flag_manifest_path, - flag_no_verify: no_verify, - .. 
- } = options; - - let root = try!(find_root_manifest_for_cwd(flag_manifest_path.clone())); - ops::publish(&root, config, token, host, !no_verify).map(|_| None).map_err(|err| { - CliError::from_boxed(err, 101) - }) -} diff --git a/src/bin/read_manifest.rs b/src/bin/read_manifest.rs deleted file mode 100644 index 2539655f859..00000000000 --- a/src/bin/read_manifest.rs +++ /dev/null @@ -1,43 +0,0 @@ -use std::env; -use std::error::Error; - -use cargo::core::{Package, Source}; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; -use cargo::sources::{PathSource}; - -#[derive(RustcDecodable)] -struct Options { - flag_manifest_path: Option, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Usage: - cargo read-manifest [options] - cargo read-manifest -h | --help - -Options: - -h, --help Print this message - -v, --verbose Use verbose output - --manifest-path PATH Path to the manifest to compile - --color WHEN Coloring: auto, always, never -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - debug!("executing; cmd=cargo-read-manifest; args={:?}", - env::args().collect::>()); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - - let mut source = try!(PathSource::for_path(root.parent().unwrap(), config).map_err(|e| { - CliError::new(e.description(), 1) - })); - - try!(source.update().map_err(|err| CliError::new(err.description(), 1))); - - source.root_package() - .map(|pkg| Some(pkg)) - .map_err(|err| CliError::from_boxed(err, 1)) -} diff --git a/src/bin/run.rs b/src/bin/run.rs deleted file mode 100644 index c1c04295886..00000000000 --- a/src/bin/run.rs +++ /dev/null @@ -1,100 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; - -#[derive(RustcDecodable)] -struct Options { - flag_bin: Option, - flag_example: Option, - flag_jobs: Option, - flag_features: Vec, - flag_no_default_features: bool, - flag_target: Option, - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_release: bool, - arg_args: Vec, -} - -pub const USAGE: &'static str = " -Run the main binary of the local package (src/main.rs) - -Usage: - cargo run [options] [--] [...] - -Options: - -h, --help Print this message - --bin NAME Name of the bin target to run - --example NAME Name of the example target to run - -j N, --jobs N The number of jobs to run in parallel - --release Build artifacts in release mode, with optimizations - --features FEATURES Space-separated list of features to also build - --no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to execute - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -If neither `--bin` nor `--example` are given, then if the project only has one -bin target it will be run. Otherwise `--bin` specifies the bin target to run, -and `--example` specifies the example target to run. At most one of `--bin` or -`--example` can be provided. - -All of the trailing arguments are passed to the binary to run. If you're passing -arguments to both Cargo and the binary, the ones after `--` go to the binary, -the ones before go to Cargo. 
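The `--` separator is the part people most often get wrong, so a small sketch may help; `--input data.txt` stands in for arbitrary arguments of the program being run:

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // `--release` is consumed by Cargo; everything after `--` is
    // passed verbatim to the compiled binary.
    Command::new("cargo")
        .args(["run", "--release", "--", "--input", "data.txt"])
        .status()?;
    Ok(())
}
```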
-"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - - let (mut examples, mut bins) = (Vec::new(), Vec::new()); - if let Some(s) = options.flag_bin { - bins.push(s); - } - if let Some(s) = options.flag_example { - examples.push(s); - } - - let compile_opts = ops::CompileOptions { - config: config, - jobs: options.flag_jobs, - target: options.flag_target.as_ref().map(|t| &t[..]), - features: &options.flag_features, - no_default_features: options.flag_no_default_features, - spec: None, - exec_engine: None, - release: options.flag_release, - mode: ops::CompileMode::Build, - filter: if examples.is_empty() && bins.is_empty() { - ops::CompileFilter::Everything - } else { - ops::CompileFilter::Only { - lib: false, tests: &[], benches: &[], - bins: &bins, examples: &examples, - } - }, - target_rustc_args: None, - }; - - let err = try!(ops::run(&root, - &compile_opts, - &options.arg_args).map_err(|err| { - CliError::from_boxed(err, 101) - })); - match err { - None => Ok(None), - Some(err) => { - Err(match err.exit.as_ref().and_then(|e| e.code()) { - Some(i) => CliError::from_error(err, i), - None => CliError::from_error(err, 101), - }) - } - } -} diff --git a/src/bin/rustc.rs b/src/bin/rustc.rs deleted file mode 100644 index 8a589091aae..00000000000 --- a/src/bin/rustc.rs +++ /dev/null @@ -1,95 +0,0 @@ -use std::env; - -use cargo::ops::CompileOptions; -use cargo::ops; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; -use cargo::util::{CliResult, CliError, Config}; - -#[derive(RustcDecodable)] -struct Options { - arg_opts: Option>, - flag_package: Option, - flag_jobs: Option, - flag_features: Vec, - flag_no_default_features: bool, - flag_target: Option, - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_release: bool, - flag_lib: bool, - flag_bin: Vec, - flag_example: Vec, - flag_test: Vec, - flag_bench: Vec, -} - -pub const USAGE: &'static str = " -Compile a package and all of its dependencies - -Usage: - cargo rustc [options] [--] [...] - -Options: - -h, --help Print this message - -p SPEC, --package SPEC The profile to compile for - -j N, --jobs N The number of jobs to run in parallel - --lib Build only this package's library - --bin NAME Build only the specified binary - --example NAME Build only the specified example - --test NAME Build only the specified test target - --bench NAME Build only the specified benchmark target - --release Build artifacts in release mode, with optimizations - --features FEATURES Features to compile for the package - --no-default-features Do not compile default features for the package - --target TRIPLE Target triple which compiles will be for - --manifest-path PATH Path to the manifest to fetch dependencies for - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -The specified target for the current package (or package specified by SPEC if -provided) will be compiled along with all of its dependencies. The specified -... will all be passed to the final compiler invocation, not any of the -dependencies. Note that the compiler will still unconditionally receive -arguments such as -L, --extern, and --crate-type, and the specified ... 
-will simply be added to the compiler invocation. - -This command requires that only one target is being compiled. If more than one -target is available for the current package the filters of --lib, --bin, etc, -must be used to select which target is compiled. -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - debug!("executing; cmd=cargo-rustc; args={:?}", - env::args().collect::>()); - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - - let opts = CompileOptions { - config: config, - jobs: options.flag_jobs, - target: options.flag_target.as_ref().map(|t| &t[..]), - features: &options.flag_features, - no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), - exec_engine: None, - mode: ops::CompileMode::Build, - release: options.flag_release, - filter: ops::CompileFilter::new(options.flag_lib, - &options.flag_bin, - &options.flag_test, - &options.flag_example, - &options.flag_bench), - target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]), - }; - - ops::compile(&root, &opts).map(|_| None).map_err(|err| { - CliError::from_boxed(err, 101) - }) -} - - diff --git a/src/bin/search.rs b/src/bin/search.rs deleted file mode 100644 index 05c5475855d..00000000000 --- a/src/bin/search.rs +++ /dev/null @@ -1,40 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; - -#[derive(RustcDecodable)] -struct Options { - flag_host: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - arg_query: String -} - -pub const USAGE: &'static str = " -Search packages in crates.io - -Usage: - cargo search [options] - cargo search [-h | --help] - -Options: - -h, --help Print this message - --host HOST Host of a registry to search in - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let Options { - flag_host: host, - arg_query: query, - .. - } = options; - - ops::search(&query, config, host) - .map(|_| None) - .map_err(|err| CliError::from_boxed(err, 101)) -} diff --git a/src/bin/test.rs b/src/bin/test.rs deleted file mode 100644 index 8366a0badad..00000000000 --- a/src/bin/test.rs +++ /dev/null @@ -1,107 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Human, Config}; -use cargo::util::important_paths::{find_root_manifest_for_cwd}; - -#[derive(RustcDecodable)] -struct Options { - arg_args: Vec, - flag_features: Vec, - flag_jobs: Option, - flag_manifest_path: Option, - flag_no_default_features: bool, - flag_no_run: bool, - flag_package: Option, - flag_target: Option, - flag_lib: bool, - flag_bin: Vec, - flag_example: Vec, - flag_test: Vec, - flag_bench: Vec, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_release: bool, -} - -pub const USAGE: &'static str = " -Execute all unit and integration tests of a local package - -Usage: - cargo test [options] [--] [...] 
- -Options: - -h, --help Print this message - --lib Test only this package's library - --bin NAME Test only the specified binary - --example NAME Test only the specified example - --test NAME Test only the specified integration test target - --bench NAME Test only the specified benchmark target - --no-run Compile, but don't run tests - -p SPEC, --package SPEC Package to run tests for - -j N, --jobs N The number of jobs to run in parallel - --release Build artifacts in release mode, with optimizations - --features FEATURES Space-separated list of features to also build - --no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to build tests for - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -All of the trailing arguments are passed to the test binaries generated for -filtering tests and generally providing options configuring how they run. For -example, this will run all tests with the name `foo` in their name: - - cargo test foo - -If the --package argument is given, then SPEC is a package id specification -which indicates which package should be tested. If it is not given, then the -current package is tested. For more information on SPEC and its format, see the -`cargo help pkgid` command. - -The --jobs argument affects the building of the test executable but does -not affect how many jobs are used when running the tests. - -Compilation can be configured via the `test` profile in the manifest. -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - - let ops = ops::TestOptions { - no_run: options.flag_no_run, - compile_opts: ops::CompileOptions { - config: config, - jobs: options.flag_jobs, - target: options.flag_target.as_ref().map(|s| &s[..]), - features: &options.flag_features, - no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), - exec_engine: None, - release: options.flag_release, - mode: ops::CompileMode::Test, - filter: ops::CompileFilter::new(options.flag_lib, - &options.flag_bin, - &options.flag_test, - &options.flag_example, - &options.flag_bench), - target_rustc_args: None, - }, - }; - - let err = try!(ops::run_tests(&root, &ops, - &options.arg_args).map_err(|err| { - CliError::from_boxed(err, 101) - })); - match err { - None => Ok(None), - Some(err) => { - Err(match err.exit.as_ref().and_then(|e| e.code()) { - Some(i) => CliError::new("", i), - None => CliError::from_error(Human(err), 101) - }) - } - } -} diff --git a/src/bin/update.rs b/src/bin/update.rs deleted file mode 100644 index cd4cd1173aa..00000000000 --- a/src/bin/update.rs +++ /dev/null @@ -1,73 +0,0 @@ -use std::env; - -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; -use cargo::util::important_paths::find_root_manifest_for_cwd; - -#[derive(RustcDecodable)] -struct Options { - flag_package: Option, - flag_aggressive: bool, - flag_precise: Option, - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Update dependencies as recorded in the local lock file. 
- -Usage: - cargo update [options] - -Options: - -h, --help Print this message - -p SPEC, --package SPEC Package to update - --aggressive Force updating all dependencies of as well - --precise PRECISE Update a single dependency to exactly PRECISE - --manifest-path PATH Path to the manifest to compile - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -This command requires that a `Cargo.lock` already exists as generated by -`cargo build` or related commands. - -If SPEC is given, then a conservative update of the lockfile will be -performed. This means that only the dependency specified by SPEC will be -updated. Its transitive dependencies will be updated only if SPEC cannot be -updated without updating dependencies. All other dependencies will remain -locked at their currently recorded versions. - -If PRECISE is specified, then --aggressive must not also be specified. The -argument PRECISE is a string representing a precise revision that the package -being updated should be updated to. For example, if the package comes from a git -repository, then PRECISE would be the exact revision that the repository should -be updated to. - -If SPEC is not given, then all dependencies will be re-resolved and -updated. - -For more information about package id specifications, see `cargo help pkgid`. -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - debug!("executing; cmd=cargo-update; args={:?}", env::args().collect::>()); - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - - let spec = options.flag_package.as_ref(); - - let update_opts = ops::UpdateOptions { - aggressive: options.flag_aggressive, - precise: options.flag_precise.as_ref().map(|s| &s[..]), - to_update: spec.map(|s| &s[..]), - config: config, - }; - - ops::update_lockfile(&root, &update_opts) - .map(|_| None).map_err(|err| CliError::from_boxed(err, 101)) -} - diff --git a/src/bin/verify_project.rs b/src/bin/verify_project.rs deleted file mode 100644 index b4998282e1d..00000000000 --- a/src/bin/verify_project.rs +++ /dev/null @@ -1,65 +0,0 @@ -use std::collections::HashMap; -use std::fs::File; -use std::io::prelude::*; -use std::process; - -use cargo::util::important_paths::{find_root_manifest_for_cwd}; -use cargo::util::{CliResult, Config}; -use rustc_serialize::json; -use toml; - -pub type Error = HashMap; - -#[derive(RustcDecodable)] -struct Flags { - flag_manifest_path: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, -} - -pub const USAGE: &'static str = " -Usage: - cargo verify-project [options] - cargo verify-project -h | --help - -Options: - -h, --help Print this message - --manifest-path PATH Path to the manifest to verify - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never -"; - -pub fn execute(args: Flags, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(args.flag_verbose, args.flag_quiet)); - try!(config.shell().set_color_config(args.flag_color.as_ref().map(|s| &s[..]))); - - let mut contents = String::new(); - let filename = args.flag_manifest_path.unwrap_or("Cargo.toml".into()); - let filename = match find_root_manifest_for_cwd(Some(filename)) { - Ok(manifest_path) => manifest_path, - Err(e) => fail("invalid", &e.to_string()), - }; - 
- let file = File::open(&filename); - match file.and_then(|mut f| f.read_to_string(&mut contents)) { - Ok(_) => {}, - Err(e) => fail("invalid", &format!("error reading file: {}", e)) - }; - match toml::Parser::new(&contents).parse() { - None => fail("invalid", "invalid-format"), - Some(..) => {} - }; - - let mut h = HashMap::new(); - h.insert("success".to_string(), "true".to_string()); - Ok(Some(h)) -} - -fn fail(reason: &str, value: &str) -> ! { - let mut h = HashMap::new(); - h.insert(reason.to_string(), value.to_string()); - println!("{}", json::encode(&h).unwrap()); - process::exit(1) -} diff --git a/src/bin/version.rs b/src/bin/version.rs deleted file mode 100644 index 12c954f58c4..00000000000 --- a/src/bin/version.rs +++ /dev/null @@ -1,25 +0,0 @@ -use std::env; - -use cargo; -use cargo::util::{CliResult, Config}; - -#[derive(RustcDecodable)] -struct Options; - -pub const USAGE: &'static str = " -Usage: - cargo version [options] - -Options: - -h, --help Print this message - -v, --verbose Use verbose output - --color WHEN Coloring: auto, always, never -"; - -pub fn execute(_: Options, _: &Config) -> CliResult> { - debug!("executing; cmd=cargo-version; args={:?}", env::args().collect::>()); - - println!("{}", cargo::version()); - - Ok(None) -} diff --git a/src/bin/yank.rs b/src/bin/yank.rs deleted file mode 100644 index d5322eb55a5..00000000000 --- a/src/bin/yank.rs +++ /dev/null @@ -1,56 +0,0 @@ -use cargo::ops; -use cargo::util::{CliResult, CliError, Config}; - -#[derive(RustcDecodable)] -struct Options { - arg_crate: Option, - flag_token: Option, - flag_vers: Option, - flag_index: Option, - flag_verbose: bool, - flag_quiet: bool, - flag_color: Option, - flag_undo: bool, -} - -pub static USAGE: &'static str = " -Remove a pushed crate from the index - -Usage: - cargo yank [options] [] - -Options: - -h, --help Print this message - --vers VERSION The version to yank or un-yank - --undo Undo a yank, putting a version back into the index - --index INDEX Registry index to yank from - --token TOKEN API token to use when authenticating - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - -The yank command removes a previously pushed crate's version from the server's -index. This command does not delete any data, and the crate will still be -available for download via the registry's download link. - -Note that existing crates locked to a yanked version will still be able to -download the yanked version to use it. Cargo will, however, not allow any new -crates to be locked to any yanked version. -"; - -pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.shell().set_verbosity(options.flag_verbose, options.flag_quiet)); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); - try!(ops::yank(config, - options.arg_crate, - options.flag_vers, - options.flag_token, - options.flag_index, - options.flag_undo).map_err(|e| { - CliError::from_boxed(e, 101) - })); - Ok(None) -} - - - diff --git a/src/cargo/core/compiler/build_config.rs b/src/cargo/core/compiler/build_config.rs new file mode 100644 index 00000000000..7f795c442cd --- /dev/null +++ b/src/cargo/core/compiler/build_config.rs @@ -0,0 +1,232 @@ +use std::cell::RefCell; +use std::path::Path; + +use serde::ser; + +use crate::util::ProcessBuilder; +use crate::util::{CargoResult, CargoResultExt, Config, RustfixDiagnosticServer}; + +/// Configuration information for a rustc build. 
+#[derive(Debug)] +pub struct BuildConfig { + /// The target arch triple. + /// Default: host arch. + pub requested_target: Option, + /// Number of rustc jobs to run in parallel. + pub jobs: u32, + /// `true` if we are building for release. + pub release: bool, + /// The mode we are compiling in. + pub mode: CompileMode, + /// `true` to print stdout in JSON format (for machine reading). + pub message_format: MessageFormat, + /// Force Cargo to do a full rebuild and treat each target as changed. + pub force_rebuild: bool, + /// Output a build plan to stdout instead of actually compiling. + pub build_plan: bool, + /// An optional override of the rustc path for primary units only + pub primary_unit_rustc: Option, + pub rustfix_diagnostic_server: RefCell>, + /// Whether or not Cargo should cache compiler output on disk. + cache_messages: bool, +} + +impl BuildConfig { + /// Parses all config files to learn about build configuration. Currently + /// configured options are: + /// + /// * `build.jobs` + /// * `build.target` + /// * `target.$target.ar` + /// * `target.$target.linker` + /// * `target.$target.libfoo.metadata` + pub fn new( + config: &Config, + jobs: Option, + requested_target: &Option, + mode: CompileMode, + ) -> CargoResult { + let requested_target = match requested_target { + &Some(ref target) if target.ends_with(".json") => { + let path = Path::new(target).canonicalize().chain_err(|| { + failure::format_err!("Target path {:?} is not a valid file", target) + })?; + Some( + path.into_os_string() + .into_string() + .map_err(|_| failure::format_err!("Target path is not valid unicode"))?, + ) + } + other => other.clone(), + }; + if let Some(ref s) = requested_target { + if s.trim().is_empty() { + failure::bail!("target was empty") + } + } + let cfg_target = match config.get_string("build.target")? { + Some(ref target) if target.val.ends_with(".json") => { + let path = target.definition.root(config).join(&target.val); + let path_string = path + .into_os_string() + .into_string() + .map_err(|_| failure::format_err!("Target path is not valid unicode")); + Some(path_string?) + } + other => other.map(|t| t.val), + }; + let target = requested_target.or(cfg_target); + + if jobs == Some(0) { + failure::bail!("jobs must be at least 1") + } + if jobs.is_some() && config.jobserver_from_env().is_some() { + config.shell().warn( + "a `-j` argument was passed to Cargo but Cargo is \ + also configured with an external jobserver in \ + its environment, ignoring the `-j` parameter", + )?; + } + let cfg_jobs: Option = config.get("build.jobs")?; + let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32); + + Ok(BuildConfig { + requested_target: target, + jobs, + release: false, + mode, + message_format: MessageFormat::Human, + force_rebuild: false, + build_plan: false, + primary_unit_rustc: None, + rustfix_diagnostic_server: RefCell::new(None), + cache_messages: config.cli_unstable().cache_messages, + }) + } + + /// Whether or not Cargo should cache compiler messages on disk. + pub fn cache_messages(&self) -> bool { + self.cache_messages + } + + /// Whether or not the *user* wants JSON output. Whether or not rustc + /// actually uses JSON is decided in `add_error_format`. 
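A minimal sketch of driving the constructor above from the `cargo` crate's public API of this era (`Config::default()` plus the `failure`-based error type); the job count and target triple are arbitrary:

```rust
use cargo::core::compiler::{BuildConfig, CompileMode};
use cargo::util::Config;

fn main() -> Result<(), failure::Error> {
    let config = Config::default()?;
    let mut build_config = BuildConfig::new(
        &config,
        Some(2), // jobs; must be at least 1, see the bail! above
        &Some("x86_64-unknown-linux-gnu".to_string()),
        CompileMode::Check { test: false },
    )?;
    // `new` always starts from a non-release, human-readable-output
    // configuration; callers flip the fields they need afterwards.
    build_config.release = true;
    assert!(!build_config.test());
    Ok(())
}
```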
+ pub fn emit_json(&self) -> bool { + self.message_format == MessageFormat::Json + } + + pub fn test(&self) -> bool { + self.mode == CompileMode::Test || self.mode == CompileMode::Bench + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum MessageFormat { + Human, + Json, + Short, +} + +/// The general "mode" for what to do. +/// This is used for two purposes. The commands themselves pass this in to +/// `compile_ws` to tell it the general execution strategy. This influences +/// the default targets selected. The other use is in the `Unit` struct +/// to indicate what is being done with a specific target. +#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash, PartialOrd, Ord)] +pub enum CompileMode { + /// A target being built for a test. + Test, + /// Building a target with `rustc` (lib or bin). + Build, + /// Building a target with `rustc` to emit `rmeta` metadata only. If + /// `test` is true, then it is also compiled with `--test` to check it like + /// a test. + Check { test: bool }, + /// Used to indicate benchmarks should be built. This is not used in + /// `Target`, because it is essentially the same as `Test` (indicating + /// `--test` should be passed to rustc) and by using `Test` instead it + /// allows some de-duping of Units to occur. + Bench, + /// A target that will be documented with `rustdoc`. + /// If `deps` is true, then it will also document all dependencies. + Doc { deps: bool }, + /// A target that will be tested with `rustdoc`. + Doctest, + /// A marker for Units that represent the execution of a `build.rs` script. + RunCustomBuild, +} + +impl ser::Serialize for CompileMode { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + use self::CompileMode::*; + match *self { + Test => "test".serialize(s), + Build => "build".serialize(s), + Check { .. } => "check".serialize(s), + Bench => "bench".serialize(s), + Doc { .. } => "doc".serialize(s), + Doctest => "doctest".serialize(s), + RunCustomBuild => "run-custom-build".serialize(s), + } + } +} + +impl CompileMode { + /// Returns `true` if the unit is being checked. + pub fn is_check(self) -> bool { + match self { + CompileMode::Check { .. } => true, + _ => false, + } + } + + /// Returns `true` if this is generating documentation. + pub fn is_doc(self) -> bool { + match self { + CompileMode::Doc { .. } => true, + _ => false, + } + } + + /// Returns `true` if this a doc test. + pub fn is_doc_test(self) -> bool { + self == CompileMode::Doctest + } + + /// Returns `true` if this is any type of test (test, benchmark, doc test, or + /// check test). + pub fn is_any_test(self) -> bool { + match self { + CompileMode::Test + | CompileMode::Bench + | CompileMode::Check { test: true } + | CompileMode::Doctest => true, + _ => false, + } + } + + /// Returns `true` if this is the *execution* of a `build.rs` script. + pub fn is_run_custom_build(self) -> bool { + self == CompileMode::RunCustomBuild + } + + /// List of all modes (currently used by `cargo clean -p` for computing + /// all possible outputs). 
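A short usage sketch exercising the predicates defined above; nothing here is new API:

```rust
use cargo::core::compiler::CompileMode;

fn main() {
    // `Check { test: true }` counts as a test for unit selection even
    // though rustc only emits metadata for it.
    let mode = CompileMode::Check { test: true };
    assert!(mode.is_check());
    assert!(mode.is_any_test());
    assert!(!mode.is_doc());
    assert!(!mode.is_run_custom_build());
}
```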
+ pub fn all_modes() -> &'static [CompileMode] { + static ALL: [CompileMode; 9] = [ + CompileMode::Test, + CompileMode::Build, + CompileMode::Check { test: true }, + CompileMode::Check { test: false }, + CompileMode::Bench, + CompileMode::Doc { deps: true }, + CompileMode::Doc { deps: false }, + CompileMode::Doctest, + CompileMode::RunCustomBuild, + ]; + &ALL + } +} diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs new file mode 100644 index 00000000000..3e67718fbfe --- /dev/null +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -0,0 +1,290 @@ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::str; + +use log::debug; + +use crate::core::compiler::unit::UnitInterner; +use crate::core::compiler::{BuildConfig, BuildOutput, Kind, Unit}; +use crate::core::profiles::Profiles; +use crate::core::{Dependency, Workspace}; +use crate::core::{PackageId, PackageSet, Resolve}; +use crate::util::errors::CargoResult; +use crate::util::{profile, Cfg, Config, Platform, Rustc}; + +mod target_info; +pub use self::target_info::{FileFlavor, TargetInfo}; + +/// The build context, containing all information about a build task. +pub struct BuildContext<'a, 'cfg> { + /// The workspace the build is for. + pub ws: &'a Workspace<'cfg>, + /// The cargo configuration. + pub config: &'cfg Config, + /// The dependency graph for our build. + pub resolve: &'a Resolve, + pub profiles: &'a Profiles, + pub build_config: &'a BuildConfig, + /// Extra compiler args for either `rustc` or `rustdoc`. + pub extra_compiler_args: HashMap, Vec>, + pub packages: &'a PackageSet<'cfg>, + + /// Information about the compiler. + pub rustc: Rustc, + /// Build information for the host arch. + pub host_config: TargetConfig, + /// Build information for the target. + pub target_config: TargetConfig, + pub target_info: TargetInfo, + pub host_info: TargetInfo, + pub units: &'a UnitInterner<'a>, +} + +impl<'a, 'cfg> BuildContext<'a, 'cfg> { + pub fn new( + ws: &'a Workspace<'cfg>, + resolve: &'a Resolve, + packages: &'a PackageSet<'cfg>, + config: &'cfg Config, + build_config: &'a BuildConfig, + profiles: &'a Profiles, + units: &'a UnitInterner<'a>, + extra_compiler_args: HashMap, Vec>, + ) -> CargoResult> { + let rustc = config.load_global_rustc(Some(ws))?; + + let host_config = TargetConfig::new(config, &rustc.host)?; + let target_config = match build_config.requested_target.as_ref() { + Some(triple) => TargetConfig::new(config, triple)?, + None => host_config.clone(), + }; + let (host_info, target_info) = { + let _p = profile::start("BuildContext::probe_target_info"); + debug!("probe_target_info"); + let host_info = + TargetInfo::new(config, &build_config.requested_target, &rustc, Kind::Host)?; + let target_info = + TargetInfo::new(config, &build_config.requested_target, &rustc, Kind::Target)?; + (host_info, target_info) + }; + + Ok(BuildContext { + ws, + resolve, + packages, + config, + rustc, + target_config, + target_info, + host_config, + host_info, + build_config, + profiles, + extra_compiler_args, + units, + }) + } + + pub fn extern_crate_name(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> CargoResult { + self.resolve + .extern_crate_name(unit.pkg.package_id(), dep.pkg.package_id(), dep.target) + } + + pub fn is_public_dependency(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> bool { + self.resolve + .is_public_dep(unit.pkg.package_id(), dep.pkg.package_id()) + } + + /// Whether a given platform matches the host or target platform, + /// specified by `Kind`. 
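An illustrative sketch of the check `platform_activated` performs, assuming (as in this version) that `Platform` and `Cfg` from `cargo::util` both parse via `FromStr` with a `failure::Error` error type:

```rust
use std::str::FromStr;

use cargo::util::{Cfg, Platform};

fn main() -> Result<(), failure::Error> {
    // A `[target.'cfg(unix)'.dependencies]`-style platform spec...
    let platform = Platform::from_str("cfg(unix)")?;
    // ...matched against `--print=cfg` values like those a TargetInfo
    // holds for the host or target.
    let cfg = vec![
        Cfg::from_str("unix")?,
        Cfg::from_str("target_os = \"linux\"")?,
    ];
    assert!(platform.matches("x86_64-unknown-linux-gnu", &cfg));
    Ok(())
}
```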
+ pub fn platform_activated(&self, platform: Option<&Platform>, kind: Kind) -> bool { + let platform = match platform { + Some(p) => p, + None => return true, + }; + let (name, info) = match kind { + Kind::Host => (self.host_triple(), &self.host_info), + Kind::Target => (self.target_triple(), &self.target_info), + }; + platform.matches(name, info.cfg()) + } + + /// Whether a dependency should be compiled for the host or target platform, + /// specified by `Kind`. + pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { + // If this dependency is only available for certain platforms, + // make sure we're only enabling it for that platform. + self.platform_activated(dep.platform(), kind) + } + + /// Gets the user-specified linker for a particular host or target + pub fn linker(&self, kind: Kind) -> Option<&Path> { + self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) + } + + /// Gets the user-specified `ar` program for a particular host or target. + pub fn ar(&self, kind: Kind) -> Option<&Path> { + self.target_config(kind).ar.as_ref().map(|s| s.as_ref()) + } + + /// Gets the list of `cfg`s printed out from the compiler for the specified kind. + pub fn cfg(&self, kind: Kind) -> &[Cfg] { + let info = match kind { + Kind::Host => &self.host_info, + Kind::Target => &self.target_info, + }; + info.cfg() + } + + /// Gets the host architecture triple. + /// + /// For example, x86_64-unknown-linux-gnu, would be + /// - machine: x86_64, + /// - hardware-platform: unknown, + /// - operating system: linux-gnu. + pub fn host_triple(&self) -> &str { + &self.rustc.host + } + + pub fn target_triple(&self) -> &str { + self.build_config + .requested_target + .as_ref() + .map(|s| s.as_str()) + .unwrap_or_else(|| self.host_triple()) + } + + /// Gets the target configuration for a particular host or target. + fn target_config(&self, kind: Kind) -> &TargetConfig { + match kind { + Kind::Host => &self.host_config, + Kind::Target => &self.target_config, + } + } + + /// Gets the number of jobs specified for this build. + pub fn jobs(&self) -> u32 { + self.build_config.jobs + } + + pub fn rustflags_args(&self, unit: &Unit<'_>) -> &[String] { + &self.info(unit.kind).rustflags + } + + pub fn rustdocflags_args(&self, unit: &Unit<'_>) -> &[String] { + &self.info(unit.kind).rustdocflags + } + + pub fn show_warnings(&self, pkg: PackageId) -> bool { + pkg.source_id().is_path() || self.config.extra_verbose() + } + + fn info(&self, kind: Kind) -> &TargetInfo { + match kind { + Kind::Host => &self.host_info, + Kind::Target => &self.target_info, + } + } + + pub fn extra_args_for(&self, unit: &Unit<'a>) -> Option<&Vec> { + self.extra_compiler_args.get(unit) + } +} + +/// Information required to build for a target. +#[derive(Clone, Default)] +pub struct TargetConfig { + /// The path of archiver (lib builder) for this target. + pub ar: Option, + /// The path of the linker for this target. + pub linker: Option, + /// Special build options for any necessary input files (filename -> options). + pub overrides: HashMap, +} + +impl TargetConfig { + pub fn new(config: &Config, triple: &str) -> CargoResult { + let key = format!("target.{}", triple); + let mut ret = TargetConfig { + ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val), + linker: config.get_path(&format!("{}.linker", key))?.map(|v| v.val), + overrides: HashMap::new(), + }; + let table = match config.get_table(&key)? 
{ + Some(table) => table.val, + None => return Ok(ret), + }; + for (lib_name, value) in table { + match lib_name.as_str() { + "ar" | "linker" | "runner" | "rustflags" => continue, + _ => {} + } + + let mut output = BuildOutput { + library_paths: Vec::new(), + library_links: Vec::new(), + linker_args: Vec::new(), + cfgs: Vec::new(), + env: Vec::new(), + metadata: Vec::new(), + rerun_if_changed: Vec::new(), + rerun_if_env_changed: Vec::new(), + warnings: Vec::new(), + }; + // We require deterministic order of evaluation, so we must sort the pairs by key first. + let mut pairs = Vec::new(); + for (k, value) in value.table(&lib_name)?.0 { + pairs.push((k, value)); + } + pairs.sort_by_key(|p| p.0); + for (k, value) in pairs { + let key = format!("{}.{}", key, k); + match &k[..] { + "rustc-flags" => { + let (flags, definition) = value.string(k)?; + let whence = format!("in `{}` (in {})", key, definition.display()); + let (paths, links) = BuildOutput::parse_rustc_flags(flags, &whence)?; + output.library_paths.extend(paths); + output.library_links.extend(links); + } + "rustc-link-lib" => { + let list = value.list(k)?; + output + .library_links + .extend(list.iter().map(|v| v.0.clone())); + } + "rustc-link-search" => { + let list = value.list(k)?; + output + .library_paths + .extend(list.iter().map(|v| PathBuf::from(&v.0))); + } + "rustc-cdylib-link-arg" => { + let args = value.list(k)?; + output.linker_args.extend(args.iter().map(|v| v.0.clone())); + } + "rustc-cfg" => { + let list = value.list(k)?; + output.cfgs.extend(list.iter().map(|v| v.0.clone())); + } + "rustc-env" => { + for (name, val) in value.table(k)?.0 { + let val = val.string(name)?.0; + output.env.push((name.clone(), val.to_string())); + } + } + "warning" | "rerun-if-changed" | "rerun-if-env-changed" => { + failure::bail!("`{}` is not supported in build script overrides", k); + } + _ => { + let val = value.string(k)?.0; + output.metadata.push((k.clone(), val.to_string())); + } + } + } + ret.overrides.insert(lib_name, output); + } + + Ok(ret) + } +} diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs new file mode 100644 index 00000000000..ecdb4239c65 --- /dev/null +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -0,0 +1,473 @@ +use std::cell::RefCell; +use std::collections::hash_map::{Entry, HashMap}; +use std::env; +use std::path::PathBuf; +use std::str::{self, FromStr}; + +use crate::core::compiler::Kind; +use crate::core::TargetKind; +use crate::util::CfgExpr; +use crate::util::{CargoResult, CargoResultExt, Cfg, Config, ProcessBuilder, Rustc}; + +/// Information about the platform target gleaned from querying rustc. +/// +/// The `BuildContext` keeps two of these, one for the host and one for the +/// target. If no target is specified, it uses a clone from the host. +#[derive(Clone)] +pub struct TargetInfo { + /// A base process builder for discovering crate type information. In + /// particular, this is used to determine the output filename prefix and + /// suffix for a crate type. + crate_type_process: ProcessBuilder, + /// Cache of output filename prefixes and suffixes. + /// + /// The key is the crate type name (like `cdylib`) and the value is + /// `Some((prefix, suffix))`, for example `libcargo.so` would be + /// `Some(("lib", ".so")). The value is `None` if the crate type is not + /// supported. + crate_types: RefCell>>, + /// `cfg` information extracted from `rustc --print=cfg`. + cfg: Vec, + /// Path to the "lib" directory in the sysroot. 
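The override tables parsed above normally come from Cargo configuration files. This standalone sketch uses the `toml` crate directly (not Cargo's own config layer) to show the shape being consumed and the key sorting that keeps evaluation deterministic; the library name `foo` and the paths are hypothetical:

```rust
use toml::Value;

fn main() {
    let config = r#"
        [target.x86_64-unknown-linux-gnu.foo]
        rustc-link-lib = ["foo"]
        rustc-link-search = ["/opt/foo/lib"]
        rustc-cfg = ["has_foo"]
    "#;
    let value: Value = config.parse().unwrap();
    let overrides = &value["target"]["x86_64-unknown-linux-gnu"]["foo"];

    // Deterministic order of evaluation, as in `TargetConfig::new`.
    let mut keys: Vec<_> = overrides.as_table().unwrap().keys().collect();
    keys.sort();
    for key in keys {
        println!("{} => {}", key, overrides[key.as_str()]);
    }
}
```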
+ pub sysroot_libdir: PathBuf, + /// Extra flags to pass to `rustc`, see `env_args`. + pub rustflags: Vec, + /// Extra flags to pass to `rustdoc`, see `env_args`. + pub rustdocflags: Vec, +} + +/// Kind of each file generated by a Unit, part of `FileType`. +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum FileFlavor { + /// Not a special file type. + Normal, + /// Something you can link against (e.g., a library). + Linkable { rmeta: bool }, + /// Piece of external debug information (e.g., `.dSYM`/`.pdb` file). + DebugInfo, +} + +/// Type of each file generated by a Unit. +pub struct FileType { + /// The kind of file. + pub flavor: FileFlavor, + /// The suffix for the file (for example, `.rlib`). + suffix: String, + /// The prefix for the file (for example, `lib`). + prefix: String, + // Wasm bin target will generate two files in deps such as + // "web-stuff.js" and "web_stuff.wasm". Note the different usages of + // "-" and "_". should_replace_hyphens is a flag to indicate that + // we need to convert the stem "web-stuff" to "web_stuff", so we + // won't miss "web_stuff.wasm". + should_replace_hyphens: bool, +} + +impl FileType { + pub fn filename(&self, stem: &str) -> String { + let stem = if self.should_replace_hyphens { + stem.replace("-", "_") + } else { + stem.to_string() + }; + format!("{}{}{}", self.prefix, stem, self.suffix) + } +} + +impl TargetInfo { + pub fn new( + config: &Config, + requested_target: &Option, + rustc: &Rustc, + kind: Kind, + ) -> CargoResult { + let rustflags = env_args( + config, + requested_target, + &rustc.host, + None, + kind, + "RUSTFLAGS", + )?; + let mut process = rustc.process(); + process + .arg("-") + .arg("--crate-name") + .arg("___") + .arg("--print=file-names") + .args(&rustflags) + .env_remove("RUSTC_LOG"); + + let target_triple = requested_target + .as_ref() + .map(|s| s.as_str()) + .unwrap_or(&rustc.host); + if kind == Kind::Target { + process.arg("--target").arg(target_triple); + } + + let crate_type_process = process.clone(); + const KNOWN_CRATE_TYPES: &[&str] = + &["bin", "rlib", "dylib", "cdylib", "staticlib", "proc-macro"]; + for crate_type in KNOWN_CRATE_TYPES.iter() { + process.arg("--crate-type").arg(crate_type); + } + + process.arg("--print=sysroot"); + process.arg("--print=cfg"); + + let (output, error) = rustc + .cached_output(&process) + .chain_err(|| "failed to run `rustc` to learn about target-specific information")?; + + let mut lines = output.lines(); + let mut map = HashMap::new(); + for crate_type in KNOWN_CRATE_TYPES { + let out = parse_crate_type(crate_type, &process, &output, &error, &mut lines)?; + map.insert(crate_type.to_string(), out); + } + + let line = match lines.next() { + Some(line) => line, + None => failure::bail!( + "output of --print=sysroot missing when learning about \ + target-specific information from rustc\n{}", + output_err_info(&process, &output, &error) + ), + }; + let mut rustlib = PathBuf::from(line); + let sysroot_libdir = match kind { + Kind::Host => { + if cfg!(windows) { + rustlib.push("bin"); + } else { + rustlib.push("lib"); + } + rustlib + } + Kind::Target => { + rustlib.push("lib"); + rustlib.push("rustlib"); + rustlib.push(target_triple); + rustlib.push("lib"); + rustlib + } + }; + + let cfg = lines.map(Cfg::from_str).collect::>>()?; + + Ok(TargetInfo { + crate_type_process, + crate_types: RefCell::new(map), + sysroot_libdir, + // recalculate `rustflags` from above now that we have `cfg` + // information + rustflags: env_args( + config, + requested_target, + &rustc.host, + Some(&cfg), + kind, 
+ "RUSTFLAGS", + )?, + rustdocflags: env_args( + config, + requested_target, + &rustc.host, + Some(&cfg), + kind, + "RUSTDOCFLAGS", + )?, + cfg, + }) + } + + pub fn cfg(&self) -> &[Cfg] { + &self.cfg + } + + pub fn file_types( + &self, + crate_type: &str, + flavor: FileFlavor, + kind: &TargetKind, + target_triple: &str, + ) -> CargoResult>> { + let mut crate_types = self.crate_types.borrow_mut(); + let entry = crate_types.entry(crate_type.to_string()); + let crate_type_info = match entry { + Entry::Occupied(o) => &*o.into_mut(), + Entry::Vacant(v) => { + let value = self.discover_crate_type(v.key())?; + &*v.insert(value) + } + }; + let (prefix, suffix) = match *crate_type_info { + Some((ref prefix, ref suffix)) => (prefix, suffix), + None => return Ok(None), + }; + let mut ret = vec![FileType { + suffix: suffix.clone(), + prefix: prefix.clone(), + flavor, + should_replace_hyphens: false, + }]; + + // See rust-lang/cargo#4500. + if target_triple.ends_with("pc-windows-msvc") + && crate_type.ends_with("dylib") + && suffix == ".dll" + { + ret.push(FileType { + suffix: ".dll.lib".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::Normal, + should_replace_hyphens: false, + }) + } + + // See rust-lang/cargo#4535. + if target_triple.starts_with("wasm32-") && crate_type == "bin" && suffix == ".js" { + ret.push(FileType { + suffix: ".wasm".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::Normal, + should_replace_hyphens: true, + }) + } + + // See rust-lang/cargo#4490, rust-lang/cargo#4960. + // Only uplift debuginfo for binaries. + // - Tests are run directly from `target/debug/deps/` with the + // metadata hash still in the filename. + // - Examples are only uplifted for apple because the symbol file + // needs to match the executable file name to be found (i.e., it + // needs to remove the hash in the filename). On Windows, the path + // to the .pdb with the hash is embedded in the executable. + let is_apple = target_triple.contains("-apple-"); + if *kind == TargetKind::Bin || (*kind == TargetKind::ExampleBin && is_apple) { + if is_apple { + ret.push(FileType { + suffix: ".dSYM".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::DebugInfo, + should_replace_hyphens: false, + }) + } else if target_triple.ends_with("-msvc") { + ret.push(FileType { + suffix: ".pdb".to_string(), + prefix: prefix.clone(), + flavor: FileFlavor::DebugInfo, + should_replace_hyphens: false, + }) + } + } + + Ok(Some(ret)) + } + + fn discover_crate_type(&self, crate_type: &str) -> CargoResult> { + let mut process = self.crate_type_process.clone(); + + process.arg("--crate-type").arg(crate_type); + + let output = process.exec_with_output().chain_err(|| { + format!( + "failed to run `rustc` to learn about crate-type {} information", + crate_type + ) + })?; + + let error = str::from_utf8(&output.stderr).unwrap(); + let output = str::from_utf8(&output.stdout).unwrap(); + Ok(parse_crate_type( + crate_type, + &process, + output, + error, + &mut output.lines(), + )?) + } +} + +/// Takes rustc output (using specialized command line args), and calculates the file prefix and +/// suffix for the given crate type, or returns `None` if the type is not supported. (e.g., for a +/// Rust library like `libcargo.rlib`, we have prefix "lib" and suffix "rlib"). +/// +/// The caller needs to ensure that the lines object is at the correct line for the given crate +/// type: this is not checked. 
+//
+// This function cannot handle more than one file per type (with wasm32-unknown-emscripten, there
+// are two files for bin (`.wasm` and `.js`)).
+fn parse_crate_type(
+    crate_type: &str,
+    cmd: &ProcessBuilder,
+    output: &str,
+    error: &str,
+    lines: &mut str::Lines<'_>,
+) -> CargoResult<Option<(String, String)>> {
+    let not_supported = error.lines().any(|line| {
+        (line.contains("unsupported crate type") || line.contains("unknown crate type"))
+            && line.contains(crate_type)
+    });
+    if not_supported {
+        return Ok(None);
+    }
+    let line = match lines.next() {
+        Some(line) => line,
+        None => failure::bail!(
+            "malformed output when learning about crate-type {} information\n{}",
+            crate_type,
+            output_err_info(cmd, output, error)
+        ),
+    };
+    let mut parts = line.trim().split("___");
+    let prefix = parts.next().unwrap();
+    let suffix = match parts.next() {
+        Some(part) => part,
+        None => failure::bail!(
+            "output of --print=file-names has changed in the compiler, cannot parse\n{}",
+            output_err_info(cmd, output, error)
+        ),
+    };
+
+    Ok(Some((prefix.to_string(), suffix.to_string())))
+}
+
+/// Helper for creating an error message when parsing rustc output fails.
+fn output_err_info(cmd: &ProcessBuilder, stdout: &str, stderr: &str) -> String {
+    let mut result = format!("command was: {}\n", cmd);
+    if !stdout.is_empty() {
+        result.push_str("\n--- stdout\n");
+        result.push_str(stdout);
+    }
+    if !stderr.is_empty() {
+        result.push_str("\n--- stderr\n");
+        result.push_str(stderr);
+    }
+    if stdout.is_empty() && stderr.is_empty() {
+        result.push_str("(no output received)");
+    }
+    result
+}
+
+/// Acquires extra flags to pass to the compiler from various locations.
+///
+/// The locations are:
+///
+/// - the `RUSTFLAGS` environment variable
+///
+/// then, if that was not found,
+///
+/// - `target.*.rustflags` from the config (`.cargo/config`)
+/// - `target.cfg(..).rustflags` from the config
+///
+/// then, if neither of those was found,
+///
+/// - `build.rustflags` from the config
+///
+/// Note that if a `target` is specified, no args will be passed to host code (plugins, build
+/// scripts, ...), even if it is the same as the target.
+fn env_args(
+    config: &Config,
+    requested_target: &Option<String>,
+    host_triple: &str,
+    target_cfg: Option<&[Cfg]>,
+    kind: Kind,
+    name: &str,
+) -> CargoResult<Vec<String>> {
+    // We *want* to apply RUSTFLAGS only to builds for the
+    // requested target architecture, and not to things like build
+    // scripts and plugins, which may be for an entirely different
+    // architecture. Cargo's present architecture makes it quite
+    // hard to only apply flags to things that are not build
+    // scripts and plugins, though, so we do something more hacky
+    // instead to avoid applying the same RUSTFLAGS to multiple target
+    // arches:
+    //
+    // 1) If --target is not specified we just apply RUSTFLAGS to
+    //    all builds; they are all going to have the same target.
+    //
+    // 2) If --target *is* specified then we only apply RUSTFLAGS
+    //    to compilation units with the Target kind, which indicates
+    //    it was chosen by the --target flag.
+    //
+    // This means that, e.g., even if the specified --target is the
+    // same as the host, build scripts and plugins won't get
+    // RUSTFLAGS.
+    let compiling_with_target = requested_target.is_some();
+    let is_target_kind = kind == Kind::Target;
+
+    if compiling_with_target && !is_target_kind {
+        // This is probably a build script or plugin and we're
+        // compiling with --target. In this scenario there are
+        // no rustflags we can apply.
+        return Ok(Vec::new());
+    }
+
+    // First try RUSTFLAGS from the environment.
+    if let Ok(a) = env::var(name) {
+        let args = a
+            .split(' ')
+            .map(str::trim)
+            .filter(|s| !s.is_empty())
+            .map(str::to_string);
+        return Ok(args.collect());
+    }
+
+    let mut rustflags = Vec::new();
+
+    let name = name
+        .chars()
+        .flat_map(|c| c.to_lowercase())
+        .collect::<String>();
+    // Then the target.*.rustflags value...
+    let target = requested_target
+        .as_ref()
+        .map(|s| s.as_str())
+        .unwrap_or(host_triple);
+    let key = format!("target.{}.{}", target, name);
+    if let Some(args) = config.get_list_or_split_string(&key)? {
+        let args = args.val.into_iter();
+        rustflags.extend(args);
+    }
+    // ...including target.'cfg(...)'.rustflags
+    if let Some(target_cfg) = target_cfg {
+        if let Some(table) = config.get_table("target")? {
+            let cfgs = table
+                .val
+                .keys()
+                .filter(|key| CfgExpr::matches_key(key, target_cfg));
+
+            // Note that we may have multiple matching `[target]` sections and,
+            // because we're passing flags to the compiler, this can affect
+            // cargo's caching and whether it rebuilds. Ensure a deterministic
+            // ordering through sorting for now. We may perhaps one day wish to
+            // ensure a deterministic ordering via the order the keys were
+            // defined in files.
+            let mut cfgs = cfgs.collect::<Vec<_>>();
+            cfgs.sort();
+
+            for n in cfgs {
+                let key = format!("target.{}.{}", n, name);
+                if let Some(args) = config.get_list_or_split_string(&key)? {
+                    let args = args.val.into_iter();
+                    rustflags.extend(args);
+                }
+            }
+        }
+    }
+
+    if !rustflags.is_empty() {
+        return Ok(rustflags);
+    }
+
+    // Then the `build.rustflags` value.
+    let key = format!("build.{}", name);
+    if let Some(args) = config.get_list_or_split_string(&key)? {
+        let args = args.val.into_iter();
+        return Ok(args.collect());
+    }
+
+    Ok(Vec::new())
+}
diff --git a/src/cargo/core/compiler/build_plan.rs b/src/cargo/core/compiler/build_plan.rs
new file mode 100644
index 00000000000..cfdd1a01523
--- /dev/null
+++ b/src/cargo/core/compiler/build_plan.rs
@@ -0,0 +1,162 @@
+//! A graph-like structure used to represent the rustc commands to build the package and the
+//! interdependencies between them.
+//!
+//! The `BuildPlan` structure is used to store the dependency graph of a dry run so that it can be
+//! shared with an external build system. Each `Invocation` in the `BuildPlan` comprises a single
+//! subprocess and defines the build environment, the outputs produced by the subprocess, and the
+//! dependencies on other `Invocation`s.
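+//!
+//! When Cargo is asked to emit a build plan (via the unstable `--build-plan`
+//! option), the serialized JSON looks roughly like the sketch below. The
+//! field values here are invented for illustration, but the field names
+//! mirror the `Invocation` and `SerializedBuildPlan` structs in this file:
+//!
+//! ```text
+//! {
+//!     "invocations": [
+//!         {
+//!             "package_name": "foo",
+//!             "package_version": "0.1.0",
+//!             "target_kind": ["lib"],
+//!             "kind": "Host",
+//!             "compile_mode": "build",
+//!             "deps": [],
+//!             "outputs": ["/path/to/target/debug/deps/libfoo-<hash>.rlib"],
+//!             "links": {},
+//!             "program": "rustc",
+//!             "args": ["..."],
+//!             "env": {},
+//!             "cwd": "/path/to/foo"
+//!         }
+//!     ],
+//!     "inputs": ["/path/to/foo/Cargo.toml"]
+//! }
+//! ```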
+
+use std::collections::BTreeMap;
+use std::path::PathBuf;
+
+use serde::Serialize;
+
+use super::context::OutputFile;
+use super::{CompileMode, Context, Kind, Unit};
+use crate::core::TargetKind;
+use crate::util::{internal, CargoResult, ProcessBuilder};
+
+#[derive(Debug, Serialize)]
+struct Invocation {
+    package_name: String,
+    package_version: semver::Version,
+    target_kind: TargetKind,
+    kind: Kind,
+    compile_mode: CompileMode,
+    deps: Vec<usize>,
+    outputs: Vec<PathBuf>,
+    links: BTreeMap<PathBuf, PathBuf>,
+    program: String,
+    args: Vec<String>,
+    env: BTreeMap<String, String>,
+    cwd: Option<PathBuf>,
+}
+
+#[derive(Debug)]
+pub struct BuildPlan {
+    invocation_map: BTreeMap<String, usize>,
+    plan: SerializedBuildPlan,
+}
+
+#[derive(Debug, Serialize)]
+struct SerializedBuildPlan {
+    invocations: Vec<Invocation>,
+    inputs: Vec<PathBuf>,
+}
+
+impl Invocation {
+    pub fn new(unit: &Unit<'_>, deps: Vec<usize>) -> Invocation {
+        let id = unit.pkg.package_id();
+        Invocation {
+            package_name: id.name().to_string(),
+            package_version: id.version().clone(),
+            kind: unit.kind,
+            target_kind: unit.target.kind().clone(),
+            compile_mode: unit.mode,
+            deps,
+            outputs: Vec::new(),
+            links: BTreeMap::new(),
+            program: String::new(),
+            args: Vec::new(),
+            env: BTreeMap::new(),
+            cwd: None,
+        }
+    }
+
+    pub fn add_output(&mut self, path: &PathBuf, link: &Option<PathBuf>) {
+        self.outputs.push(path.clone());
+        if let Some(ref link) = *link {
+            self.links.insert(link.clone(), path.clone());
+        }
+    }
+
+    pub fn update_cmd(&mut self, cmd: &ProcessBuilder) -> CargoResult<()> {
+        self.program = cmd
+            .get_program()
+            .to_str()
+            .ok_or_else(|| failure::format_err!("unicode program string required"))?
+            .to_string();
+        self.cwd = Some(cmd.get_cwd().unwrap().to_path_buf());
+        for arg in cmd.get_args().iter() {
+            self.args.push(
+                arg.to_str()
+                    .ok_or_else(|| failure::format_err!("unicode argument string required"))?
+                    .to_string(),
+            );
+        }
+        for (var, value) in cmd.get_envs() {
+            let value = match value {
+                Some(s) => s,
+                None => continue,
+            };
+            self.env.insert(
+                var.clone(),
+                value
+                    .to_str()
+                    .ok_or_else(|| failure::format_err!("unicode environment value required"))?
+                    .to_string(),
+            );
+        }
+        Ok(())
+    }
+}
+
+impl BuildPlan {
+    pub fn new() -> BuildPlan {
+        BuildPlan {
+            invocation_map: BTreeMap::new(),
+            plan: SerializedBuildPlan::new(),
+        }
+    }
+
+    pub fn add<'a>(&mut self, cx: &Context<'a, '_>, unit: &Unit<'a>) -> CargoResult<()> {
+        let id = self.plan.invocations.len();
+        self.invocation_map.insert(unit.buildkey(), id);
+        let deps = cx
+            .dep_targets(unit)
+            .iter()
+            .map(|dep| self.invocation_map[&dep.buildkey()])
+            .collect();
+        let invocation = Invocation::new(unit, deps);
+        self.plan.invocations.push(invocation);
+        Ok(())
+    }
+
+    pub fn update(
+        &mut self,
+        invocation_name: &str,
+        cmd: &ProcessBuilder,
+        outputs: &[OutputFile],
+    ) -> CargoResult<()> {
+        let id = self.invocation_map[invocation_name];
+        let invocation =
+            self.plan.invocations.get_mut(id).ok_or_else(|| {
+                internal(format!("couldn't find invocation for {}", invocation_name))
+            })?;
+
+        invocation.update_cmd(cmd)?;
+        for output in outputs.iter() {
+            invocation.add_output(&output.path, &output.hardlink);
+        }
+
+        Ok(())
+    }
+
+    pub fn set_inputs(&mut self, inputs: Vec<PathBuf>) {
+        self.plan.inputs = inputs;
+    }
+
+    pub fn output_plan(self) {
+        let encoded = serde_json::to_string(&self.plan).unwrap();
+        println!("{}", encoded);
+    }
+}
+
+impl SerializedBuildPlan {
+    pub fn new() -> SerializedBuildPlan {
+        SerializedBuildPlan {
+            invocations: Vec::new(),
+            inputs: Vec::new(),
+        }
+    }
+}
diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs
new file mode 100644
index 00000000000..8c24c2e281b
--- /dev/null
+++ b/src/cargo/core/compiler/compilation.rs
@@ -0,0 +1,313 @@
+use std::collections::{BTreeSet, HashMap, HashSet};
+use std::env;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+
+use semver::Version;
+
+use super::BuildContext;
+use crate::core::{Edition, Package, PackageId, Target};
+use crate::util::{self, join_paths, process, CargoResult, CfgExpr, Config, ProcessBuilder};
+
+pub struct Doctest {
+    /// The package being doc-tested.
+    pub package: Package,
+    /// The target being tested (currently always the package's lib).
+    pub target: Target,
+    /// Extern dependencies needed by `rustdoc`. The path is the location of
+    /// the compiled lib.
+    pub deps: Vec<(String, PathBuf)>,
+}
+
+/// A structure returning the result of a compilation.
+pub struct Compilation<'cfg> {
+    /// An array of all tests created during this compilation.
+    pub tests: Vec<(Package, Target, PathBuf)>,
+
+    /// An array of all binaries created.
+    pub binaries: Vec<PathBuf>,
+
+    /// All directories for the output of native build commands.
+    ///
+    /// This is currently used to drive some entries which are added to the
+    /// LD_LIBRARY_PATH as appropriate.
+    ///
+    /// The order should be deterministic.
+    pub native_dirs: BTreeSet<PathBuf>,
+
+    /// Root output directory (for the local package's artifacts).
+    pub root_output: PathBuf,
+
+    /// Output directory for rust dependencies.
+    /// May be for the host or for a specific target.
+    pub deps_output: PathBuf,
+
+    /// Output directory for the rust host dependencies.
+    pub host_deps_output: PathBuf,
+
+    /// The path to rustc's own libstd.
+    pub host_dylib_path: PathBuf,
+
+    /// The path to libstd for the target.
+    pub target_dylib_path: PathBuf,
+
+    /// Extra environment variables that were passed to compilations and should
+    /// be passed to future invocations of programs.
+    pub extra_env: HashMap<PackageId, Vec<(String, String)>>,
+
+    /// Libraries to test with rustdoc.
+    pub to_doc_test: Vec<Doctest>,
+
+    /// Features per package enabled during this compilation.
+    pub cfgs: HashMap<PackageId, HashSet<String>>,
+
+    /// Flags to pass to rustdoc when invoked from cargo test, per package.
+    pub rustdocflags: HashMap<PackageId, Vec<String>>,
+
+    pub host: String,
+    pub target: String,
+
+    config: &'cfg Config,
+    rustc_process: ProcessBuilder,
+    primary_unit_rustc_process: Option<ProcessBuilder>,
+
+    target_runner: Option<(PathBuf, Vec<String>)>,
+}
+
+impl<'cfg> Compilation<'cfg> {
+    pub fn new<'a>(bcx: &BuildContext<'a, 'cfg>) -> CargoResult<Compilation<'cfg>> {
+        let mut rustc = bcx.rustc.process();
+
+        let mut primary_unit_rustc_process =
+            bcx.build_config.primary_unit_rustc.clone().map(|mut r| {
+                r.arg(&bcx.rustc.path);
+                r
+            });
+
+        if bcx.config.extra_verbose() {
+            rustc.display_env_vars();
+
+            if let Some(rustc) = primary_unit_rustc_process.as_mut() {
+                rustc.display_env_vars();
+            }
+        }
+
+        Ok(Compilation {
+            // TODO: deprecated; remove.
+            native_dirs: BTreeSet::new(),
+            root_output: PathBuf::from("/"),
+            deps_output: PathBuf::from("/"),
+            host_deps_output: PathBuf::from("/"),
+            host_dylib_path: bcx.host_info.sysroot_libdir.clone(),
+            target_dylib_path: bcx.target_info.sysroot_libdir.clone(),
+            tests: Vec::new(),
+            binaries: Vec::new(),
+            extra_env: HashMap::new(),
+            to_doc_test: Vec::new(),
+            cfgs: HashMap::new(),
+            rustdocflags: HashMap::new(),
+            config: bcx.config,
+            rustc_process: rustc,
+            primary_unit_rustc_process,
+            host: bcx.host_triple().to_string(),
+            target: bcx.target_triple().to_string(),
+            target_runner: target_runner(bcx)?,
+        })
+    }
+
+    /// See `process`.
+    pub fn rustc_process(
+        &self,
+        pkg: &Package,
+        target: &Target,
+        is_primary: bool,
+    ) -> CargoResult<ProcessBuilder> {
+        let rustc = if is_primary {
+            self.primary_unit_rustc_process
+                .clone()
+                .unwrap_or_else(|| self.rustc_process.clone())
+        } else {
+            self.rustc_process.clone()
+        };
+
+        let mut p = self.fill_env(rustc, pkg, true)?;
+        if target.edition() != Edition::Edition2015 {
+            p.arg(format!("--edition={}", target.edition()));
+        }
+        Ok(p)
+    }
+
+    /// See `process`.
+    pub fn rustdoc_process(&self, pkg: &Package, target: &Target) -> CargoResult<ProcessBuilder> {
+        let mut p = self.fill_env(process(&*self.config.rustdoc()?), pkg, false)?;
+        if target.edition() != Edition::Edition2015 {
+            p.arg(format!("--edition={}", target.edition()));
+        }
+        Ok(p)
+    }
+
+    /// See `process`.
+    pub fn host_process<T: AsRef<OsStr>>(
+        &self,
+        cmd: T,
+        pkg: &Package,
+    ) -> CargoResult<ProcessBuilder> {
+        self.fill_env(process(cmd), pkg, true)
+    }
+
+    fn target_runner(&self) -> &Option<(PathBuf, Vec<String>)> {
+        &self.target_runner
+    }
+
+    /// See `process`.
+    pub fn target_process<T: AsRef<OsStr>>(
+        &self,
+        cmd: T,
+        pkg: &Package,
+    ) -> CargoResult<ProcessBuilder> {
+        let builder = if let Some((ref runner, ref args)) = *self.target_runner() {
+            let mut builder = process(runner);
+            builder.args(args);
+            builder.arg(cmd);
+            builder
+        } else {
+            process(cmd)
+        };
+        self.fill_env(builder, pkg, false)
+    }
+
+    /// Prepares a new process with an appropriate environment to run against
+    /// the artifacts produced by the build process.
+    ///
+    /// The package argument is also used to configure environment variables as
+    /// well as the working directory of the child process.
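+    ///
+    /// (Besides the dynamic library search path, this sets variables such as
+    /// `CARGO_MANIFEST_DIR` and the `CARGO_PKG_*` family; see the body below.)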
+    fn fill_env(
+        &self,
+        mut cmd: ProcessBuilder,
+        pkg: &Package,
+        is_host: bool,
+    ) -> CargoResult<ProcessBuilder> {
+        let mut search_path = if is_host {
+            let mut search_path = vec![self.host_deps_output.clone()];
+            search_path.push(self.host_dylib_path.clone());
+            search_path
+        } else {
+            let mut search_path =
+                super::filter_dynamic_search_path(self.native_dirs.iter(), &self.root_output);
+            search_path.push(self.deps_output.clone());
+            search_path.push(self.root_output.clone());
+            search_path.push(self.target_dylib_path.clone());
+            search_path
+        };
+
+        let dylib_path = util::dylib_path();
+        let dylib_path_is_empty = dylib_path.is_empty();
+        search_path.extend(dylib_path.into_iter());
+        if cfg!(target_os = "macos") && dylib_path_is_empty {
+            // These are the defaults when DYLD_FALLBACK_LIBRARY_PATH isn't
+            // set or is set to an empty string. Since Cargo is explicitly
+            // setting the value, make sure the defaults still work.
+            if let Some(home) = env::var_os("HOME") {
+                search_path.push(PathBuf::from(home).join("lib"));
+            }
+            search_path.push(PathBuf::from("/usr/local/lib"));
+            search_path.push(PathBuf::from("/usr/lib"));
+        }
+        let search_path = join_paths(&search_path, util::dylib_path_envvar())?;
+
+        cmd.env(util::dylib_path_envvar(), &search_path);
+        if let Some(env) = self.extra_env.get(&pkg.package_id()) {
+            for &(ref k, ref v) in env {
+                cmd.env(k, v);
+            }
+        }
+
+        let metadata = pkg.manifest().metadata();
+
+        let cargo_exe = self.config.cargo_exe()?;
+        cmd.env(crate::CARGO_ENV, cargo_exe);
+
+        // When adding new environment variables that depend on crate
+        // properties which might require a rebuild upon change, consider
+        // adding the corresponding properties to the hash in
+        // BuildContext::target_metadata().
+        cmd.env("CARGO_MANIFEST_DIR", pkg.root())
+            .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string())
+            .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string())
+            .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string())
+            .env(
+                "CARGO_PKG_VERSION_PRE",
+                &pre_version_component(pkg.version()),
+            )
+            .env("CARGO_PKG_VERSION", &pkg.version().to_string())
+            .env("CARGO_PKG_NAME", &*pkg.name())
+            .env(
+                "CARGO_PKG_DESCRIPTION",
+                metadata.description.as_ref().unwrap_or(&String::new()),
+            )
+            .env(
+                "CARGO_PKG_HOMEPAGE",
+                metadata.homepage.as_ref().unwrap_or(&String::new()),
+            )
+            .env(
+                "CARGO_PKG_REPOSITORY",
+                metadata.repository.as_ref().unwrap_or(&String::new()),
+            )
+            .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":"))
+            .cwd(pkg.root());
+        Ok(cmd)
+    }
+}
+
+fn pre_version_component(v: &Version) -> String {
+    if v.pre.is_empty() {
+        return String::new();
+    }
+
+    let mut ret = String::new();
+
+    for (i, x) in v.pre.iter().enumerate() {
+        if i != 0 {
+            ret.push('.')
+        };
+        ret.push_str(&x.to_string());
+    }
+
+    ret
+}
+
+fn target_runner(bcx: &BuildContext<'_, '_>) -> CargoResult<Option<(PathBuf, Vec<String>)>> {
+    let target = bcx.target_triple();
+
+    // Try `target.{}.runner`.
+    let key = format!("target.{}.runner", target);
+    if let Some(v) = bcx.config.get_path_and_args(&key)? {
+        return Ok(Some(v.val));
+    }
+
+    // Try `target.'cfg(...)'.runner`.
+    if let Some(table) = bcx.config.get_table("target")? {
+        let mut matching_runner = None;
+
+        for key in table.val.keys() {
+            if CfgExpr::matches_key(key, bcx.target_info.cfg()) {
+                let key = format!("target.{}.runner", key);
+                if let Some(runner) = bcx.config.get_path_and_args(&key)?
+                {
+                    // More than one match; error out.
+                    if matching_runner.is_some() {
+                        failure::bail!(
+                            "several matching instances of `target.'cfg(..)'.runner` \
+                             in `.cargo/config`"
+                        )
+                    }
+
+                    matching_runner = Some(runner.val);
+                }
+            }
+        }
+
+        return Ok(matching_runner);
+    }
+
+    Ok(None)
+}
diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs
new file mode 100644
index 00000000000..caf7c5f8be0
--- /dev/null
+++ b/src/cargo/core/compiler/context/compilation_files.rs
@@ -0,0 +1,599 @@
+use std::collections::HashMap;
+use std::env;
+use std::fmt;
+use std::hash::{Hash, Hasher, SipHasher};
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use lazycell::LazyCell;
+use log::info;
+
+use super::{BuildContext, Context, FileFlavor, Kind, Layout};
+use crate::core::compiler::{CompileMode, Unit};
+use crate::core::{TargetKind, Workspace};
+use crate::util::{self, CargoResult};
+
+/// The `Metadata` is a hash used to make unique file names for each unit in a build.
+/// For example:
+/// - A project may depend on crate `A` and crate `B`, so the package name must be in the file name.
+/// - Similarly, a project may depend on two versions of `A`, so the version must be in the file name.
+///
+/// In general this must include all things that need to be distinguished in different parts of
+/// the same build. This is absolutely required, or we would override things before
+/// we get a chance to use them.
+///
+/// We use a hash because it is an easy way to guarantee
+/// that all the inputs can be converted to a valid path.
+///
+/// This also acts as the main layer of caching provided by Cargo.
+/// For example, we want to cache `cargo build` and `cargo doc` separately, so that running one
+/// does not invalidate the artifacts for the other. We do this by including `CompileMode` in the
+/// hash, thus the artifacts go in different folders and do not override each other.
+/// If we fail to include something that we should have, we get the
+/// correct output but rebuild more than is needed.
+///
+/// Some things that need to be tracked to ensure the correct output should definitely *not*
+/// go in the `Metadata`. For example, the modification time of a file should be tracked to force a
+/// rebuild when the file changes. However, it would be wasteful to include it in the `Metadata`:
+/// the old artifacts are never going to be needed again, so we can save space by just overwriting
+/// them. If we include something that we should not have, we get the correct output but take
+/// more space than needed. This makes not including something in the `Metadata`
+/// a form of cache invalidation.
+///
+/// Note that the `Fingerprint` is in charge of tracking everything needed to determine if a
+/// rebuild is needed.
+#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd)]
+pub struct Metadata(u64);
+
+impl fmt::Display for Metadata {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{:016x}", self.0)
+    }
+}
+
+pub struct CompilationFiles<'a, 'cfg> {
+    /// The target directory layout for the host (and target if it is the same as the host).
+    pub(super) host: Layout,
+    /// The target directory layout for the target (if different from the host).
+    pub(super) target: Option<Layout>,
+    /// Additional directory to include a copy of the outputs.
+    export_dir: Option<PathBuf>,
+    /// The root targets requested by the user on the command line (does not
+    /// include dependencies).
+    roots: Vec<Unit<'a>>,
+    ws: &'a Workspace<'cfg>,
+    metas: HashMap<Unit<'a>, Option<Metadata>>,
+    /// For each Unit, a list of all files produced.
+    outputs: HashMap<Unit<'a>, LazyCell<Arc<Vec<OutputFile>>>>,
+}
+
+#[derive(Debug)]
+pub struct OutputFile {
+    /// Absolute path to the file that will be produced by the build process.
+    pub path: PathBuf,
+    /// If it should be linked into `target`, and what it should be called
+    /// (e.g., without metadata).
+    pub hardlink: Option<PathBuf>,
+    /// If `--out-dir` is specified, the absolute path to the exported file.
+    pub export_path: Option<PathBuf>,
+    /// Type of the file (library / debug symbol / else).
+    pub flavor: FileFlavor,
+}
+
+impl OutputFile {
+    /// Gets the hard link if present; otherwise, returns the path.
+    pub fn bin_dst(&self) -> &PathBuf {
+        match self.hardlink {
+            Some(ref link_dst) => link_dst,
+            None => &self.path,
+        }
+    }
+}
+
+impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> {
+    pub(super) fn new(
+        roots: &[Unit<'a>],
+        host: Layout,
+        target: Option<Layout>,
+        export_dir: Option<PathBuf>,
+        ws: &'a Workspace<'cfg>,
+        cx: &Context<'a, 'cfg>,
+    ) -> CompilationFiles<'a, 'cfg> {
+        let mut metas = HashMap::new();
+        for unit in roots {
+            metadata_of(unit, cx, &mut metas);
+        }
+        let outputs = metas
+            .keys()
+            .cloned()
+            .map(|unit| (unit, LazyCell::new()))
+            .collect();
+        CompilationFiles {
+            ws,
+            host,
+            target,
+            export_dir,
+            roots: roots.to_vec(),
+            metas,
+            outputs,
+        }
+    }
+
+    /// Returns the appropriate directory layout for the given kind (host for
+    /// plugins and build scripts, target otherwise).
+    pub fn layout(&self, kind: Kind) -> &Layout {
+        match kind {
+            Kind::Host => &self.host,
+            Kind::Target => self.target.as_ref().unwrap_or(&self.host),
+        }
+    }
+
+    /// Gets the metadata for a target in a specific profile.
+    /// We build to the path `"{filename}-{target_metadata}"`.
+    /// We use a linking step to link/copy to a predictable filename
+    /// like `target/debug/libfoo.{a,so,rlib}` and such.
+    pub fn metadata(&self, unit: &Unit<'a>) -> Option<Metadata> {
+        self.metas[unit].clone()
+    }
+
+    /// Gets the short hash based only on the `PackageId`.
+    /// Used for the metadata when `target_metadata` returns `None`.
+    pub fn target_short_hash(&self, unit: &Unit<'_>) -> String {
+        let hashable = unit.pkg.package_id().stable_hash(self.ws.root());
+        util::short_hash(&hashable)
+    }
+
+    /// Returns the appropriate output directory for the specified package and
+    /// target.
+    pub fn out_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        if unit.mode.is_doc() {
+            self.layout(unit.kind).root().parent().unwrap().join("doc")
+        } else if unit.mode.is_doc_test() {
+            panic!("doc tests do not have an out dir");
+        } else if unit.target.is_custom_build() {
+            self.build_script_dir(unit)
+        } else if unit.target.is_example() {
+            self.layout(unit.kind).examples().to_path_buf()
+        } else {
+            self.deps_dir(unit).to_path_buf()
+        }
+    }
+
+    pub fn export_dir(&self) -> Option<PathBuf> {
+        self.export_dir.clone()
+    }
+
+    pub fn pkg_dir(&self, unit: &Unit<'a>) -> String {
+        let name = unit.pkg.package_id().name();
+        match self.metas[unit] {
+            Some(ref meta) => format!("{}-{}", name, meta),
+            None => format!("{}-{}", name, self.target_short_hash(unit)),
+        }
+    }
+
+    /// Returns the root of the build output tree for the target.
+    pub fn target_root(&self) -> &Path {
+        self.target.as_ref().unwrap_or(&self.host).dest()
+    }
+
+    /// Returns the root of the build output tree for the host.
+    pub fn host_root(&self) -> &Path {
+        self.host.dest()
+    }
+
+    pub fn host_deps(&self) -> &Path {
+        self.host.deps()
+    }
+
+    /// Returns the directories where Rust crate dependencies are found for the
+    /// specified unit.
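+    /// (e.g., `target/debug/deps` for a default host build).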
+    pub fn deps_dir(&self, unit: &Unit<'_>) -> &Path {
+        self.layout(unit.kind).deps()
+    }
+
+    pub fn fingerprint_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        let dir = self.pkg_dir(unit);
+        self.layout(unit.kind).fingerprint().join(dir)
+    }
+
+    /// Path where compiler output is cached.
+    pub fn message_cache_path(&self, unit: &Unit<'a>) -> PathBuf {
+        self.fingerprint_dir(unit).join("output")
+    }
+
+    /// Returns the directory where a compiled build script is stored.
+    /// `/path/to/target/{debug,release}/build/PKG-HASH`
+    pub fn build_script_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        assert!(unit.target.is_custom_build());
+        assert!(!unit.mode.is_run_custom_build());
+        let dir = self.pkg_dir(unit);
+        self.layout(Kind::Host).build().join(dir)
+    }
+
+    /// Returns the directory where information about running a build script
+    /// is stored.
+    /// `/path/to/target/{debug,release}/build/PKG-HASH`
+    pub fn build_script_run_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        assert!(unit.target.is_custom_build());
+        assert!(unit.mode.is_run_custom_build());
+        let dir = self.pkg_dir(unit);
+        self.layout(unit.kind).build().join(dir)
+    }
+
+    /// Returns the "OUT_DIR" directory for running a build script.
+    /// `/path/to/target/{debug,release}/build/PKG-HASH/out`
+    pub fn build_script_out_dir(&self, unit: &Unit<'a>) -> PathBuf {
+        self.build_script_run_dir(unit).join("out")
+    }
+
+    /// Returns the file stem for a given target/profile combo (with metadata).
+    pub fn file_stem(&self, unit: &Unit<'a>) -> String {
+        match self.metas[unit] {
+            Some(ref metadata) => format!("{}-{}", unit.target.crate_name(), metadata),
+            None => self.bin_stem(unit),
+        }
+    }
+
+    pub(super) fn outputs(
+        &self,
+        unit: &Unit<'a>,
+        bcx: &BuildContext<'a, 'cfg>,
+    ) -> CargoResult<Arc<Vec<OutputFile>>> {
+        self.outputs[unit]
+            .try_borrow_with(|| self.calc_outputs(unit, bcx))
+            .map(Arc::clone)
+    }
+
+    /// Returns the bin stem for a given target (without metadata).
+    fn bin_stem(&self, unit: &Unit<'_>) -> String {
+        if unit.target.allows_underscores() {
+            unit.target.name().to_string()
+        } else {
+            unit.target.crate_name()
+        }
+    }
+
+    /// Returns a tuple with the directory and name of the hard link we expect
+    /// our target to be copied to. E.g., `file_stem` may be `out_dir/deps/foo-abcdef`,
+    /// and `link_stem` would be `out_dir/foo`.
+    /// This function returns it in two parts so the caller can add a prefix/suffix
+    /// to the filename separately.
+    ///
+    /// Returns an `Option` because in some cases we don't want to link
+    /// (e.g., a dependent lib).
+    fn link_stem(&self, unit: &Unit<'a>) -> Option<(PathBuf, String)> {
+        let out_dir = self.out_dir(unit);
+        let bin_stem = self.bin_stem(unit);
+        let file_stem = self.file_stem(unit);
+
+        // We currently only lift files up from the `deps` directory. If
+        // it was compiled into something like `example/` or `doc/` then
+        // we don't want to link it up.
+        if out_dir.ends_with("deps") {
+            // Don't lift up library dependencies.
+            if unit.target.is_bin() || self.roots.contains(unit) {
+                Some((
+                    out_dir.parent().unwrap().to_owned(),
+                    if unit.mode.is_any_test() {
+                        file_stem
+                    } else {
+                        bin_stem
+                    },
+                ))
+            } else {
+                None
+            }
+        } else if bin_stem == file_stem {
+            None
+        } else if out_dir.ends_with("examples") || out_dir.parent().unwrap().ends_with("build") {
+            Some((out_dir, bin_stem))
+        } else {
+            None
+        }
+    }
+
+    fn calc_outputs(
+        &self,
+        unit: &Unit<'a>,
+        bcx: &BuildContext<'a, 'cfg>,
+    ) -> CargoResult<Arc<Vec<OutputFile>>> {
+        let ret = match unit.mode {
+            CompileMode::Check { .. } => {
+                // This may be confusing.
+                // rustc outputs a file named `lib*.rmeta`
+                // for both libraries and binaries.
+                let file_stem = self.file_stem(unit);
+                let path = self.out_dir(unit).join(format!("lib{}.rmeta", file_stem));
+                vec![OutputFile {
+                    path,
+                    hardlink: None,
+                    export_path: None,
+                    flavor: FileFlavor::Linkable { rmeta: false },
+                }]
+            }
+            CompileMode::Doc { .. } => {
+                let path = self
+                    .out_dir(unit)
+                    .join(unit.target.crate_name())
+                    .join("index.html");
+                vec![OutputFile {
+                    path,
+                    hardlink: None,
+                    export_path: None,
+                    flavor: FileFlavor::Normal,
+                }]
+            }
+            CompileMode::RunCustomBuild => {
+                // At this time, this code path does not handle build script
+                // outputs.
+                vec![]
+            }
+            CompileMode::Doctest => {
+                // Doctests are built in a temporary directory and then
+                // deleted. There is the `--persist-doctests` unstable flag,
+                // but Cargo does not know about that.
+                vec![]
+            }
+            CompileMode::Test | CompileMode::Build | CompileMode::Bench => {
+                self.calc_outputs_rustc(unit, bcx)?
+            }
+        };
+        info!("Target filenames: {:?}", ret);
+
+        Ok(Arc::new(ret))
+    }
+
+    fn calc_outputs_rustc(
+        &self,
+        unit: &Unit<'a>,
+        bcx: &BuildContext<'a, 'cfg>,
+    ) -> CargoResult<Vec<OutputFile>> {
+        let mut ret = Vec::new();
+        let mut unsupported = Vec::new();
+
+        let out_dir = self.out_dir(unit);
+        let link_stem = self.link_stem(unit);
+        let info = if unit.kind == Kind::Host {
+            &bcx.host_info
+        } else {
+            &bcx.target_info
+        };
+        let file_stem = self.file_stem(unit);
+
+        let mut add = |crate_type: &str, flavor: FileFlavor| -> CargoResult<()> {
+            let crate_type = if crate_type == "lib" {
+                "rlib"
+            } else {
+                crate_type
+            };
+            let file_types =
+                info.file_types(crate_type, flavor, unit.target.kind(), bcx.target_triple())?;
+
+            match file_types {
+                Some(types) => {
+                    for file_type in types {
+                        let path = out_dir.join(file_type.filename(&file_stem));
+                        let hardlink = link_stem
+                            .as_ref()
+                            .map(|&(ref ld, ref ls)| ld.join(file_type.filename(ls)));
+                        let export_path = if unit.target.is_custom_build() {
+                            None
+                        } else {
+                            self.export_dir.as_ref().and_then(|export_dir| {
+                                hardlink
+                                    .as_ref()
+                                    .map(|hardlink| export_dir.join(hardlink.file_name().unwrap()))
+                            })
+                        };
+                        ret.push(OutputFile {
+                            path,
+                            hardlink,
+                            export_path,
+                            flavor: file_type.flavor,
+                        });
+                    }
+                }
+                // Not supported; don't worry about it.
+                None => {
+                    unsupported.push(crate_type.to_string());
+                }
+            }
+            Ok(())
+        };
+        match *unit.target.kind() {
+            TargetKind::Bin
+            | TargetKind::CustomBuild
+            | TargetKind::ExampleBin
+            | TargetKind::Bench
+            | TargetKind::Test => {
+                add("bin", FileFlavor::Normal)?;
+            }
+            TargetKind::Lib(..) | TargetKind::ExampleLib(..)
+                if unit.mode.is_any_test() =>
+            {
+                add("bin", FileFlavor::Normal)?;
+            }
+            TargetKind::ExampleLib(ref kinds) | TargetKind::Lib(ref kinds) => {
+                for kind in kinds {
+                    add(
+                        kind.crate_type(),
+                        if kind.linkable() {
+                            FileFlavor::Linkable { rmeta: false }
+                        } else {
+                            FileFlavor::Normal
+                        },
+                    )?;
+                }
+                let path = out_dir.join(format!("lib{}.rmeta", file_stem));
+                if !unit.requires_upstream_objects() {
+                    ret.push(OutputFile {
+                        path,
+                        hardlink: None,
+                        export_path: None,
+                        flavor: FileFlavor::Linkable { rmeta: true },
+                    });
+                }
+            }
+        }
+        if ret.is_empty() {
+            if !unsupported.is_empty() {
+                failure::bail!(
+                    "cannot produce {} for `{}` as the target `{}` \
+                     does not support these crate types",
+                    unsupported.join(", "),
+                    unit.pkg,
+                    bcx.target_triple()
+                )
+            }
+            failure::bail!(
+                "cannot compile `{}` as the target `{}` does not \
+                 support any of the output crate types",
+                unit.pkg,
+                bcx.target_triple()
+            );
+        }
+        Ok(ret)
+    }
+}
+
+fn metadata_of<'a, 'cfg>(
+    unit: &Unit<'a>,
+    cx: &Context<'a, 'cfg>,
+    metas: &mut HashMap<Unit<'a>, Option<Metadata>>,
+) -> Option<Metadata> {
+    if !metas.contains_key(unit) {
+        let meta = compute_metadata(unit, cx, metas);
+        metas.insert(*unit, meta);
+        for unit in cx.dep_targets(unit) {
+            metadata_of(&unit, cx, metas);
+        }
+    }
+    metas[unit].clone()
+}
+
+fn compute_metadata<'a, 'cfg>(
+    unit: &Unit<'a>,
+    cx: &Context<'a, 'cfg>,
+    metas: &mut HashMap<Unit<'a>, Option<Metadata>>,
+) -> Option<Metadata> {
+    if unit.mode.is_doc_test() {
+        // Doc tests do not have metadata.
+        return None;
+    }
+    // No metadata for dylibs because of a couple issues:
+    // - macOS encodes the dylib name in the executable,
+    // - Windows rustc emits multiple files, and we can't easily link all of them.
+    //
+    // No metadata for bin because of an issue:
+    // - wasm32 rustc/emcc encodes the `.wasm` name in the `.js` (rust-lang/cargo#4535).
+    //
+    // Two exceptions:
+    // 1) Upstream dependencies (we aren't exporting + need to resolve name conflicts),
+    // 2) `__CARGO_DEFAULT_LIB_METADATA` env var.
+    //
+    // Note, however, that the compiler's build system at least wants
+    // path dependencies (e.g., libstd) to have hashes in filenames. To account for
+    // that we have an extra hack here which reads the
+    // `__CARGO_DEFAULT_LIB_METADATA` environment variable and creates a
+    // hash in the filename if that's present.
+    //
+    // This environment variable should not be relied on! It's
+    // just here for rustbuild. We need a more principled method of
+    // doing this eventually.
+    let bcx = &cx.bcx;
+    let __cargo_default_lib_metadata = env::var("__CARGO_DEFAULT_LIB_METADATA");
+    if !(unit.mode.is_any_test() || unit.mode.is_check())
+        && (unit.target.is_dylib()
+            || unit.target.is_cdylib()
+            || (unit.target.is_executable() && bcx.target_triple().starts_with("wasm32-")))
+        && unit.pkg.package_id().source_id().is_path()
+        && __cargo_default_lib_metadata.is_err()
+    {
+        return None;
+    }
+
+    let mut hasher = SipHasher::new_with_keys(0, 0);
+
+    // This is a generic version number that can be changed to make
+    // backwards-incompatible changes to any file structures in the output
+    // directory. For example, the fingerprint files or the build-script
+    // output files. Normally cargo updates ship with rustc updates, which will
+    // cause a new hash due to the rustc version changing, but this allows
+    // cargo to be extra careful to deal with different versions of cargo that
+    // use the same rustc version.
+    1.hash(&mut hasher);
+
+    // Unique metadata per (name, source, version) triple. This'll allow us
+    // to pull crates from anywhere without worrying about conflicts.
+    unit.pkg
+        .package_id()
+        .stable_hash(bcx.ws.root())
+        .hash(&mut hasher);
+
+    // Also mix in enabled features to our metadata. This'll ensure that
+    // when changing feature sets each lib is separately cached.
+    bcx.resolve
+        .features_sorted(unit.pkg.package_id())
+        .hash(&mut hasher);
+
+    // Mix in the target-metadata of all the dependencies of this target.
+    {
+        let mut deps_metadata = cx
+            .dep_targets(unit)
+            .iter()
+            .map(|dep| metadata_of(dep, cx, metas))
+            .collect::<Vec<_>>();
+        deps_metadata.sort();
+        deps_metadata.hash(&mut hasher);
+    }
+
+    // Throw in the profile we're compiling with. This helps caching
+    // `panic=abort` and `panic=unwind` artifacts, additionally with various
+    // settings like debuginfo and whatnot.
+    unit.profile.hash(&mut hasher);
+    unit.mode.hash(&mut hasher);
+
+    // Throw in the rustflags we're compiling with.
+    // This helps when the target directory is a shared cache for projects with different cargo configs,
+    // or if the user is experimenting with different rustflags manually.
+    let mut hash_flags = |flags: &[String]| {
+        // Ignore some flags. These may affect reproducible builds if they affect
+        // the path. The fingerprint will handle recompilation if these change.
+        let mut iter = flags.iter();
+        while let Some(flag) = iter.next() {
+            if flag.starts_with("--remap-path-prefix=") {
+                continue;
+            }
+            if flag == "--remap-path-prefix" {
+                iter.next();
+                continue;
+            }
+            flag.hash(&mut hasher);
+        }
+    };
+    if let Some(args) = bcx.extra_args_for(unit) {
+        // Arguments passed to `cargo rustc`.
+        hash_flags(args);
+    }
+    // Arguments passed in via RUSTFLAGS env var.
+    let flags = if unit.mode.is_doc() {
+        bcx.rustdocflags_args(unit)
+    } else {
+        bcx.rustflags_args(unit)
+    };
+    hash_flags(flags);
+
+    // Artifacts compiled for the host should have a different metadata
+    // piece than those compiled for the target, so make sure we throw in
+    // the unit's `kind` as well.
+    unit.kind.hash(&mut hasher);
+
+    // Finally throw in the target name/kind. This ensures that concurrent
+    // compiles of targets in the same crate don't collide.
+    unit.target.name().hash(&mut hasher);
+    unit.target.kind().hash(&mut hasher);
+
+    bcx.rustc.verbose_version.hash(&mut hasher);
+
+    // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present.
+    // This should be the release channel, to get a different hash for each channel.
+    if let Ok(ref channel) = __cargo_default_lib_metadata {
+        channel.hash(&mut hasher);
+    }
+    Some(Metadata(hasher.finish()))
+}
diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs
new file mode 100644
index 00000000000..3cb75e344ba
--- /dev/null
+++ b/src/cargo/core/compiler/context/mod.rs
@@ -0,0 +1,563 @@
+#![allow(deprecated)]
+use std::collections::{HashMap, HashSet};
+use std::ffi::OsStr;
+use std::fmt::Write;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use filetime::FileTime;
+use jobserver::Client;
+
+use crate::core::compiler::compilation;
+use crate::core::compiler::Unit;
+use crate::core::{Package, PackageId, Resolve};
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::{internal, profile, Config};
+
+use super::build_plan::BuildPlan;
+use super::custom_build::{self, BuildDeps, BuildScripts, BuildState};
+use super::fingerprint::Fingerprint;
+use super::job_queue::JobQueue;
+use super::layout::Layout;
+use super::{BuildContext, Compilation, CompileMode, Executor, FileFlavor, Kind};
+
+mod unit_dependencies;
+use self::unit_dependencies::build_unit_dependencies;
+
+mod compilation_files;
+use self::compilation_files::CompilationFiles;
+pub use self::compilation_files::{Metadata, OutputFile};
+
+pub struct Context<'a, 'cfg> {
+    pub bcx: &'a BuildContext<'a, 'cfg>,
+    pub compilation: Compilation<'cfg>,
+    pub build_state: Arc<BuildState>,
+    pub build_script_overridden: HashSet<(PackageId, Kind)>,
+    pub build_explicit_deps: HashMap<Unit<'a>, BuildDeps>,
+    pub fingerprints: HashMap<Unit<'a>, Arc<Fingerprint>>,
+    pub mtime_cache: HashMap<PathBuf, FileTime>,
+    pub compiled: HashSet<Unit<'a>>,
+    pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
+    pub links: Links,
+    pub jobserver: Client,
+    primary_packages: HashSet<PackageId>,
+    unit_dependencies: HashMap<Unit<'a>, Vec<Unit<'a>>>,
+    files: Option<CompilationFiles<'a, 'cfg>>,
+    package_cache: HashMap<PackageId, &'a Package>,
+
+    /// A flag indicating whether pipelining is enabled for this compilation
+    /// session. Pipelining largely only affects the edges of the dependency
+    /// graph that we generate at the end, and otherwise it's pretty
+    /// straightforward.
+    pipelining: bool,
+
+    /// A set of units which are compiling rlibs and are expected to produce
+    /// metadata files in addition to the rlib itself. This is only filled in
+    /// when `pipelining` above is enabled.
+    rmeta_required: HashSet<Unit<'a>>,
+}
+
+impl<'a, 'cfg> Context<'a, 'cfg> {
+    pub fn new(config: &'cfg Config, bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult<Self> {
+        // Load up the jobserver that we'll use to manage our parallelism. This
+        // is the same as the GNU make implementation of a jobserver, and
+        // intentionally so! It's hoped that we can interact with GNU make and
+        // all share the same jobserver.
+        //
+        // Note that if we don't have a jobserver in our environment then we
+        // create our own, and we create it with `n-1` tokens because one token
+        // is ourself, a running process.
+        let jobserver = match config.jobserver_from_env() {
+            Some(c) => c.clone(),
+            None => Client::new(bcx.build_config.jobs as usize - 1)
+                .chain_err(|| "failed to create jobserver")?,
+        };
+
+        let pipelining = bcx
+            .config
+            .get_bool("build.pipelining")?
+            .map(|t| t.val)
+            .unwrap_or(false);
+
+        Ok(Self {
+            bcx,
+            compilation: Compilation::new(bcx)?,
+            build_state: Arc::new(BuildState::new(&bcx.host_config, &bcx.target_config)),
+            fingerprints: HashMap::new(),
+            mtime_cache: HashMap::new(),
+            compiled: HashSet::new(),
+            build_scripts: HashMap::new(),
+            build_explicit_deps: HashMap::new(),
+            links: Links::new(),
+            jobserver,
+            build_script_overridden: HashSet::new(),
+
+            primary_packages: HashSet::new(),
+            unit_dependencies: HashMap::new(),
+            files: None,
+            package_cache: HashMap::new(),
+            rmeta_required: HashSet::new(),
+            pipelining,
+        })
+    }
+
+    // Returns a mapping of the root package plus its immediate dependencies to
+    // where the compiled libraries are all located.
+    pub fn compile(
+        mut self,
+        units: &[Unit<'a>],
+        export_dir: Option<PathBuf>,
+        exec: &Arc<dyn Executor>,
+    ) -> CargoResult<Compilation<'cfg>> {
+        let mut queue = JobQueue::new(self.bcx);
+        let mut plan = BuildPlan::new();
+        let build_plan = self.bcx.build_config.build_plan;
+        self.prepare_units(export_dir, units)?;
+        self.prepare()?;
+        custom_build::build_map(&mut self, units)?;
+        self.check_collisions()?;
+
+        for unit in units.iter() {
+            // Build up a list of pending jobs, each of which represents
+            // compiling a particular package. No actual work is executed as
+            // part of this; that's all done next as part of the `execute`
+            // function, which will run everything in order with proper
+            // parallelism.
+            let force_rebuild = self.bcx.build_config.force_rebuild;
+            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
+        }
+
+        // Now that we've figured out everything that we're going to do, do it!
+        queue.execute(&mut self, &mut plan)?;
+
+        if build_plan {
+            plan.set_inputs(self.build_plan_inputs()?);
+            plan.output_plan();
+        }
+
+        for unit in units.iter() {
+            for output in self.outputs(unit)?.iter() {
+                if output.flavor == FileFlavor::DebugInfo {
+                    continue;
+                }
+
+                let bindst = output.bin_dst();
+
+                if unit.mode == CompileMode::Test {
+                    self.compilation.tests.push((
+                        unit.pkg.clone(),
+                        unit.target.clone(),
+                        output.path.clone(),
+                    ));
+                } else if unit.target.is_executable() {
+                    self.compilation.binaries.push(bindst.clone());
+                }
+            }
+
+            for dep in self.dep_targets(unit).iter() {
+                if !unit.target.is_lib() {
+                    continue;
+                }
+
+                if dep.mode.is_run_custom_build() {
+                    let out_dir = self.files().build_script_out_dir(dep).display().to_string();
+                    self.compilation
+                        .extra_env
+                        .entry(dep.pkg.package_id())
+                        .or_insert_with(Vec::new)
+                        .push(("OUT_DIR".to_string(), out_dir));
+                }
+            }
+
+            if unit.mode.is_doc_test() {
+                // Note that we can *only* doc-test rlib outputs here. A
+                // staticlib output cannot be linked by the compiler (it just
+                // doesn't do that). A dylib output, however, can be linked by
+                // the compiler, but will always fail. Currently all dylibs are
+                // built as "static dylibs" where the standard library is
+                // statically linked into the dylib. The doc tests fail,
+                // however, for now as they try to link the standard library
+                // dynamically as well, causing problems. As a result we only
+                // pass `--extern` for rlib deps and skip out on all other
+                // artifacts.
+                let mut doctest_deps = Vec::new();
+                for dep in self.dep_targets(unit) {
+                    if dep.target.is_lib() && dep.mode == CompileMode::Build {
+                        let outputs = self.outputs(&dep)?;
+                        let outputs = outputs.iter().filter(|output| {
+                            output.path.extension() == Some(OsStr::new("rlib"))
+                                || dep.target.for_host()
+                        });
+                        for output in outputs {
+                            doctest_deps.push((
+                                self.bcx.extern_crate_name(unit, &dep)?,
+                                output.path.clone(),
+                            ));
+                        }
+                    }
+                }
+                // Help the tests get a stable order with renamed deps.
+                doctest_deps.sort();
+                self.compilation.to_doc_test.push(compilation::Doctest {
+                    package: unit.pkg.clone(),
+                    target: unit.target.clone(),
+                    deps: doctest_deps,
+                });
+            }
+
+            let bcx = self.bcx;
+            let feats = bcx.resolve.features(unit.pkg.package_id());
+            if !feats.is_empty() {
+                self.compilation
+                    .cfgs
+                    .entry(unit.pkg.package_id())
+                    .or_insert_with(|| {
+                        feats
+                            .iter()
+                            .filter(|feat| bcx.platform_activated(feat.1.as_ref(), unit.kind))
+                            .map(|feat| format!("feature=\"{}\"", feat.0))
+                            .collect()
+                    });
+            }
+            let rustdocflags = self.bcx.rustdocflags_args(unit);
+            if !rustdocflags.is_empty() {
+                self.compilation
+                    .rustdocflags
+                    .entry(unit.pkg.package_id())
+                    .or_insert_with(|| rustdocflags.to_vec());
+            }
+
+            super::output_depinfo(&mut self, unit)?;
+        }
+
+        for (&(ref pkg, _), output) in self.build_state.outputs.lock().unwrap().iter() {
+            self.compilation
+                .cfgs
+                .entry(pkg.clone())
+                .or_insert_with(HashSet::new)
+                .extend(output.cfgs.iter().cloned());
+
+            self.compilation
+                .extra_env
+                .entry(pkg.clone())
+                .or_insert_with(Vec::new)
+                .extend(output.env.iter().cloned());
+
+            for dir in output.library_paths.iter() {
+                self.compilation.native_dirs.insert(dir.clone());
+            }
+        }
+        Ok(self.compilation)
+    }
+
+    /// Returns the executable for the specified unit (if any).
+    pub fn get_executable(&mut self, unit: &Unit<'a>) -> CargoResult<Option<PathBuf>> {
+        for output in self.outputs(unit)?.iter() {
+            if output.flavor == FileFlavor::DebugInfo {
+                continue;
+            }
+
+            let is_binary = unit.target.is_executable();
+            let is_test = unit.mode.is_any_test() && !unit.mode.is_check();
+
+            if is_binary || is_test {
+                return Ok(Some(output.bin_dst().clone()));
+            }
+        }
+        Ok(None)
+    }
+
+    pub fn prepare_units(
+        &mut self,
+        export_dir: Option<PathBuf>,
+        units: &[Unit<'a>],
+    ) -> CargoResult<()> {
+        let dest = if self.bcx.build_config.release {
+            "release"
+        } else {
+            "debug"
+        };
+        let host_layout = Layout::new(self.bcx.ws, None, dest)?;
+        let target_layout = match self.bcx.build_config.requested_target.as_ref() {
+            Some(target) => Some(Layout::new(self.bcx.ws, Some(target), dest)?),
+            None => None,
+        };
+        self.primary_packages
+            .extend(units.iter().map(|u| u.pkg.package_id()));
+
+        build_unit_dependencies(self, units)?;
+        let files = CompilationFiles::new(
+            units,
+            host_layout,
+            target_layout,
+            export_dir,
+            self.bcx.ws,
+            self,
+        );
+        self.files = Some(files);
+        Ok(())
+    }
+
+    /// Prepares this context, ensuring that all filesystem directories are in
+    /// place.
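+    ///
+    /// (Concretely, this creates the `target/{debug,release}` layout,
+    /// including the `deps` output directories, before any unit is compiled.)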
+    pub fn prepare(&mut self) -> CargoResult<()> {
+        let _p = profile::start("preparing layout");
+
+        self.files_mut()
+            .host
+            .prepare()
+            .chain_err(|| internal("couldn't prepare build directories"))?;
+        if let Some(ref mut target) = self.files.as_mut().unwrap().target {
+            target
+                .prepare()
+                .chain_err(|| internal("couldn't prepare build directories"))?;
+        }
+
+        self.compilation.host_deps_output = self.files_mut().host.deps().to_path_buf();
+
+        let files = self.files.as_ref().unwrap();
+        let layout = files.target.as_ref().unwrap_or(&files.host);
+        self.compilation.root_output = layout.dest().to_path_buf();
+        self.compilation.deps_output = layout.deps().to_path_buf();
+        Ok(())
+    }
+
+    pub fn files(&self) -> &CompilationFiles<'a, 'cfg> {
+        self.files.as_ref().unwrap()
+    }
+
+    fn files_mut(&mut self) -> &mut CompilationFiles<'a, 'cfg> {
+        self.files.as_mut().unwrap()
+    }
+
+    /// Returns the filenames that the given unit will generate.
+    pub fn outputs(&self, unit: &Unit<'a>) -> CargoResult<Arc<Vec<OutputFile>>> {
+        self.files.as_ref().unwrap().outputs(unit, self.bcx)
+    }
+
+    /// For a package, returns all targets that are registered as dependencies
+    /// for that package.
+    //
+    // TODO: this ideally should be `-> &[Unit<'a>]`.
+    pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
+        // If this build script's execution has been overridden then we don't
+        // actually depend on anything; we've reached the end of the dependency
+        // chain as we've got all the info we're going to get.
+        //
+        // Note there's a subtlety about this piece of code! The
+        // `build_script_overridden` map here is populated in
+        // `custom_build::build_map`, which you need to call before inspecting
+        // dependencies. However, that code itself calls this method and
+        // gets a full pre-filtered set of dependencies. This is not super
+        // obvious or clear, but it does work at the moment.
+        if unit.target.is_custom_build() {
+            let key = (unit.pkg.package_id(), unit.kind);
+            if self.build_script_overridden.contains(&key) {
+                return Vec::new();
+            }
+        }
+        self.unit_dependencies[unit].clone()
+    }
+
+    pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
+        self.primary_packages.contains(&unit.pkg.package_id())
+    }
+
+    /// Gets a package for the given package ID.
+    pub fn get_package(&self, id: PackageId) -> CargoResult<&'a Package> {
+        self.package_cache
+            .get(&id)
+            .cloned()
+            .ok_or_else(|| failure::format_err!("failed to find {}", id))
+    }
+
+    /// Returns the list of filenames read by cargo to generate the `BuildContext`
+    /// (all `Cargo.toml`, etc.).
+    pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
+        let mut inputs = Vec::new();
+        // Note that we're using the `package_cache`, which should have been
+        // populated by `build_unit_dependencies`, and only those packages are
+        // considered as all the inputs.
+        //
+        // (Notably, we skip dev-deps here if they aren't present.)
+        for pkg in self.package_cache.values() {
+            inputs.push(pkg.manifest_path().to_path_buf());
+        }
+        inputs.sort();
+        Ok(inputs)
+    }
+
+    fn check_collisions(&self) -> CargoResult<()> {
+        let mut output_collisions = HashMap::new();
+        let describe_collision =
+            |unit: &Unit<'_>, other_unit: &Unit<'_>, path: &PathBuf| -> String {
+                format!(
+                    "The {} target `{}` in package `{}` has the same output \
+                     filename as the {} target `{}` in package `{}`.\n\
+                     Colliding filename is: {}\n",
+                    unit.target.kind().description(),
+                    unit.target.name(),
+                    unit.pkg.package_id(),
+                    other_unit.target.kind().description(),
+                    other_unit.target.name(),
+                    other_unit.pkg.package_id(),
+                    path.display()
+                )
+            };
+        let suggestion =
+            "Consider changing their names to be unique or compiling them separately.\n\
+             This may become a hard error in the future; see \
+             <https://github.com/rust-lang/cargo/issues/6313>.";
+        let report_collision = |unit: &Unit<'_>,
+                                other_unit: &Unit<'_>,
+                                path: &PathBuf|
+         -> CargoResult<()> {
+            if unit.target.name() == other_unit.target.name() {
+                self.bcx.config.shell().warn(format!(
+                    "output filename collision.\n\
+                     {}\
+                     The targets should have unique names.\n\
+                     {}",
+                    describe_collision(unit, other_unit, path),
+                    suggestion
+                ))
+            } else {
+                self.bcx.config.shell().warn(format!(
+                    "output filename collision.\n\
+                     {}\
+                     The output filenames should be unique.\n\
+                     {}\n\
+                     If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
+                     https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
+                     can provide.\n\
+                     {} running on `{}` target `{}`\n\
+                     First unit: {:?}\n\
+                     Second unit: {:?}",
+                    describe_collision(unit, other_unit, path),
+                    suggestion,
+                    crate::version(),
+                    self.bcx.host_triple(),
+                    self.bcx.target_triple(),
+                    unit,
+                    other_unit
+                ))
+            }
+        };
+        let mut keys = self
+            .unit_dependencies
+            .keys()
+            .filter(|unit| !unit.mode.is_run_custom_build())
+            .collect::<Vec<_>>();
+        // Sort for consistent error messages.
+        keys.sort_unstable();
+        for unit in keys {
+            for output in self.outputs(unit)?.iter() {
+                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
+                    report_collision(unit, other_unit, &output.path)?;
+                }
+                if let Some(hardlink) = output.hardlink.as_ref() {
+                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
+                        report_collision(unit, other_unit, hardlink)?;
+                    }
+                }
+                if let Some(ref export_path) = output.export_path {
+                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
+                        self.bcx.config.shell().warn(format!(
+                            "`--out-dir` filename collision.\n\
+                             {}\
+                             The exported filenames should be unique.\n\
+                             {}",
+                            describe_collision(unit, other_unit, export_path),
+                            suggestion
+                        ))?;
+                    }
+                }
+            }
+        }
+        Ok(())
+    }
+
+    /// Returns whether, when `parent` depends on `dep`, it only requires the
+    /// metadata file from `dep`.
+    pub fn only_requires_rmeta(&self, parent: &Unit<'a>, dep: &Unit<'a>) -> bool {
+        // This is only enabled when pipelining is enabled.
+        self.pipelining
+            // We're only a candidate for requiring an `rmeta` file if we
+            // ourselves are building an rlib,
+            && !parent.requires_upstream_objects()
+            && parent.mode == CompileMode::Build
+            // Our dependency must also be built as an rlib, otherwise the
+            // object code must be useful in some fashion.
+            && !dep.requires_upstream_objects()
+            && dep.mode == CompileMode::Build
+    }
+
+    /// Returns whether, when `unit` is built, it should emit metadata as
+    /// well, because some compilations rely on that.
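+    /// (See the `rmeta_required` field above; the set is only populated when
+    /// pipelining is enabled.)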
+    pub fn rmeta_required(&self, unit: &Unit<'a>) -> bool {
+        self.rmeta_required.contains(unit)
+    }
+}
+
+#[derive(Default)]
+pub struct Links {
+    validated: HashSet<PackageId>,
+    links: HashMap<String, PackageId>,
+}
+
+impl Links {
+    pub fn new() -> Links {
+        Links {
+            validated: HashSet::new(),
+            links: HashMap::new(),
+        }
+    }
+
+    pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'_>) -> CargoResult<()> {
+        if !self.validated.insert(unit.pkg.package_id()) {
+            return Ok(());
+        }
+        let lib = match unit.pkg.manifest().links() {
+            Some(lib) => lib,
+            None => return Ok(()),
+        };
+        if let Some(&prev) = self.links.get(lib) {
+            let pkg = unit.pkg.package_id();
+
+            let describe_path = |pkgid: PackageId| -> String {
+                let dep_path = resolve.path_to_top(&pkgid);
+                let mut dep_path_desc = format!("package `{}`", dep_path[0]);
+                for dep in dep_path.iter().skip(1) {
+                    write!(dep_path_desc, "\n    ... which is depended on by `{}`", dep).unwrap();
+                }
+                dep_path_desc
+            };
+
+            failure::bail!(
+                "multiple packages link to native library `{}`, \
+                 but a native library can be linked only once\n\
+                 \n\
+                 {}\nlinks to native library `{}`\n\
+                 \n\
+                 {}\nalso links to native library `{}`",
+                lib,
+                describe_path(prev),
+                lib,
+                describe_path(pkg),
+                lib
+            )
+        }
+        if !unit
+            .pkg
+            .manifest()
+            .targets()
+            .iter()
+            .any(|t| t.is_custom_build())
+        {
+            failure::bail!(
+                "package `{}` specifies that it links to `{}` but does not \
+                 have a custom build script",
+                unit.pkg.package_id(),
+                lib
+            )
+        }
+        self.links.insert(lib.to_string(), unit.pkg.package_id());
+        Ok(())
+    }
+}
diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/context/unit_dependencies.rs
new file mode 100644
index 00000000000..6b68e202cec
--- /dev/null
+++ b/src/cargo/core/compiler/context/unit_dependencies.rs
@@ -0,0 +1,569 @@
+//! Constructs the dependency graph for compilation.
+//!
+//! Rust code is typically organized as a set of Cargo packages. The
+//! dependencies between the packages themselves are stored in the
+//! `Resolve` struct. However, we can't use that information as is for
+//! compilation! A package typically contains several targets, or crates,
+//! and these targets have inter-dependencies. For example, you need to
+//! compile the `lib` target before the `bin` one, and you need to compile
+//! `build.rs` before either of those.
+//!
+//! So, we need to lower the `Resolve`, which specifies dependencies between
+//! *packages*, to a graph of dependencies between their *targets*, and this
+//! is exactly what this module is doing! Well, almost exactly: another
+//! complication is that we might want to compile the same target several times
+//! (for example, with and without tests), so we actually build a dependency
+//! graph of `Unit`s, which capture these properties.
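+//!
+//! As a rough sketch (illustrative, with a hypothetical package `foo`
+//! depending on `bar`), the lowering turns the package edge `foo -> bar`
+//! into unit edges along these lines:
+//!
+//! ```text
+//! foo/bin --> foo/lib --> bar/lib
+//!                |           |
+//!                v           v
+//!        foo/build.rs   bar/build.rs   (compiled and run first)
+//! ```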
+
+use crate::core::compiler::Unit;
+use crate::core::compiler::{BuildContext, CompileMode, Context, Kind};
+use crate::core::dependency::Kind as DepKind;
+use crate::core::package::Downloads;
+use crate::core::profiles::UnitFor;
+use crate::core::{Package, PackageId, Target};
+use crate::CargoResult;
+use log::trace;
+use std::collections::{HashMap, HashSet};
+
+struct State<'a, 'cfg, 'tmp> {
+    cx: &'tmp mut Context<'a, 'cfg>,
+    waiting_on_download: HashSet<PackageId>,
+    downloads: Downloads<'a, 'cfg>,
+}
+
+pub fn build_unit_dependencies<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    roots: &[Unit<'a>],
+) -> CargoResult<()> {
+    assert!(
+        cx.unit_dependencies.is_empty(),
+        "can only build unit deps once"
+    );
+
+    let mut state = State {
+        downloads: cx.bcx.packages.enable_download()?,
+        cx,
+        waiting_on_download: HashSet::new(),
+    };
+
+    loop {
+        for unit in roots.iter() {
+            state.get(unit.pkg.package_id())?;
+
+            // Dependencies of tests/benches should not have `panic` set.
+            // We check the global test mode to see if we are running in `cargo
+            // test`, in which case we ensure all dependencies have `panic`
+            // cleared, and avoid building the lib thrice (once with `panic`, once
+            // without, once for `--test`). In particular, the lib included for
+            // doc tests and examples is in `Build` mode here.
+            let unit_for = if unit.mode.is_any_test() || state.cx.bcx.build_config.test() {
+                UnitFor::new_test()
+            } else if unit.target.is_custom_build() {
+                // This normally doesn't happen, except `clean` aggressively
+                // generates all units.
+                UnitFor::new_build()
+            } else if unit.target.for_host() {
+                // Proc macros/plugins should never have `panic` set.
+                UnitFor::new_compiler()
+            } else {
+                UnitFor::new_normal()
+            };
+            deps_of(unit, &mut state, unit_for)?;
+        }
+
+        if !state.waiting_on_download.is_empty() {
+            state.finish_some_downloads()?;
+            state.cx.unit_dependencies.clear();
+        } else {
+            break;
+        }
+    }
+
+    connect_run_custom_build_deps(&mut state);
+
+    trace!("ALL UNIT DEPENDENCIES {:#?}", state.cx.unit_dependencies);
+
+    record_units_requiring_metadata(state.cx);
+
+    // Dependencies are used in tons of places throughout the backend, many of
+    // which affect the determinism of the build itself. As a result be sure
+    // that dependency lists are always sorted to ensure we've always got a
+    // deterministic output.
+    for list in state.cx.unit_dependencies.values_mut() {
+        list.sort();
+    }
+
+    Ok(())
+}
+
+fn deps_of<'a, 'cfg, 'tmp>(
+    unit: &Unit<'a>,
+    state: &mut State<'a, 'cfg, 'tmp>,
+    unit_for: UnitFor,
+) -> CargoResult<()> {
+    // Currently the `unit_dependencies` map does not include `unit_for`. This should
+    // be safe for now. `TestDependency` only exists to clear the `panic`
+    // flag, and you'll never ask for a `unit` with `panic` set as a
+    // `TestDependency`. `CustomBuild` should also be fine since if the
+    // requested unit's settings are the same as `Any`, `CustomBuild` can't
+    // affect anything else in the hierarchy.
+    if !state.cx.unit_dependencies.contains_key(unit) {
+        let unit_deps = compute_deps(unit, state, unit_for)?;
+        let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect();
+        state.cx.unit_dependencies.insert(*unit, to_insert);
+        for (unit, unit_for) in unit_deps {
+            deps_of(&unit, state, unit_for)?;
+        }
+    }
+    Ok(())
+}
+
+/// For a package, returns all targets that are registered as dependencies
+/// for that package.
+/// This returns a `Vec` of `(Unit, UnitFor)` pairs. The `UnitFor`
+/// is the profile type that should be used for dependencies of the unit.
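+///
+/// For example (illustrative): a `bin` unit's returned dependencies typically
+/// include its own package's `lib` unit, the `lib` units of each resolved
+/// dependency, and, if the package has a build script, the unit that runs it.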
+fn compute_deps<'a, 'cfg, 'tmp>(
+    unit: &Unit<'a>,
+    state: &mut State<'a, 'cfg, 'tmp>,
+    unit_for: UnitFor,
+) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
+    if unit.mode.is_run_custom_build() {
+        return compute_deps_custom_build(unit, state.cx.bcx);
+    } else if unit.mode.is_doc() {
+        // Note: this does not include doc tests.
+        return compute_deps_doc(unit, state);
+    }
+
+    let bcx = state.cx.bcx;
+    let id = unit.pkg.package_id();
+    let deps = bcx.resolve.deps(id).filter(|&(_id, deps)| {
+        assert!(!deps.is_empty());
+        deps.iter().any(|dep| {
+            // If this target is a build command, then we only want build
+            // dependencies, otherwise we want everything *other than* build
+            // dependencies.
+            if unit.target.is_custom_build() != dep.is_build() {
+                return false;
+            }
+
+            // If this dependency is **not** a transitive dependency, then it
+            // only applies to test/example targets.
+            if !dep.is_transitive()
+                && !unit.target.is_test()
+                && !unit.target.is_example()
+                && !unit.mode.is_any_test()
+            {
+                return false;
+            }
+
+            // If this dependency is only available for certain platforms,
+            // make sure we're only enabling it for that platform.
+            if !bcx.dep_platform_activated(dep, unit.kind) {
+                return false;
+            }
+
+            // If the dependency is optional, then we're only activating it
+            // if the corresponding feature was activated.
+            if dep.is_optional() {
+                // Any platform restriction on the feature that references
+                // this dependency must also be active.
+                if let Some(platform) = bcx.resolve.features(id).get(&*dep.name_in_toml()) {
+                    if !bcx.platform_activated(platform.as_ref(), unit.kind) {
+                        return false;
+                    }
+                } else {
+                    return false;
+                }
+            }
+
+            // If we've gotten past all that, then this dependency is
+            // actually used!
+            true
+        })
+    });
+
+    let mut ret = Vec::new();
+    for (id, _) in deps {
+        let pkg = match state.get(id)? {
+            Some(pkg) => pkg,
+            None => continue,
+        };
+        let lib = match pkg.targets().iter().find(|t| t.is_lib()) {
+            Some(t) => t,
+            None => continue,
+        };
+        let mode = check_or_build_mode(unit.mode, lib);
+        let dep_unit_for = unit_for.with_for_host(lib.for_host());
+
+        if bcx.config.cli_unstable().dual_proc_macros
+            && lib.proc_macro()
+            && unit.kind == Kind::Target
+        {
+            let unit = new_unit(bcx, pkg, lib, dep_unit_for, Kind::Target, mode);
+            ret.push((unit, dep_unit_for));
+            let unit = new_unit(bcx, pkg, lib, dep_unit_for, Kind::Host, mode);
+            ret.push((unit, dep_unit_for));
+        } else {
+            let unit = new_unit(bcx, pkg, lib, dep_unit_for, unit.kind.for_target(lib), mode);
+            ret.push((unit, dep_unit_for));
+        }
+    }
+
+    // If this target is a build script, then what we've collected so far is
+    // all we need. If this isn't a build script, then it depends on the
+    // build script if there is one.
+    if unit.target.is_custom_build() {
+        return Ok(ret);
+    }
+    ret.extend(dep_build_script(unit, bcx));
+
+    // If this target is a binary, test, example, etc, then it depends on
+    // the library of the same package. The call to `resolve.deps` above
+    // didn't include `pkg` in the return values, so we need to special case
+    // it here and see if we need to push `(pkg, pkg_lib_target)`.
+    if unit.target.is_lib() && unit.mode != CompileMode::Doctest {
+        return Ok(ret);
+    }
+    ret.extend(maybe_lib(unit, bcx, unit_for));
+
+    // If any integration tests/benches are being run, make sure that
+    // binaries are built as well.
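+    //
+    // (Integration tests and benches often spawn the package's binaries as
+    // part of the test run, so those binaries need to exist even though the
+    // resolver knows nothing about this edge.)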
+    if !unit.mode.is_check()
+        && unit.mode.is_any_test()
+        && (unit.target.is_test() || unit.target.is_bench())
+    {
+        ret.extend(
+            unit.pkg
+                .targets()
+                .iter()
+                .filter(|t| {
+                    let no_required_features = Vec::new();
+
+                    t.is_bin() &&
+                        // Skip binaries with required features that have not been selected.
+                        t.required_features().unwrap_or(&no_required_features).iter().all(|f| {
+                            bcx.resolve.features(id).contains_key(f)
+                                && bcx.platform_activated(
+                                    bcx.resolve.features(id).get(f).unwrap().as_ref(),
+                                    unit.kind,
+                                )
+                        })
+                })
+                .map(|t| {
+                    (
+                        new_unit(
+                            bcx,
+                            unit.pkg,
+                            t,
+                            UnitFor::new_normal(),
+                            unit.kind.for_target(t),
+                            CompileMode::Build,
+                        ),
+                        UnitFor::new_normal(),
+                    )
+                }),
+        );
+    }
+
+    Ok(ret)
+}
+
+/// Returns the dependencies needed to run a build script.
+///
+/// The `unit` provided must represent an execution of a build script, and
+/// the returned set of units must all be run before `unit` is run.
+fn compute_deps_custom_build<'a, 'cfg>(
+    unit: &Unit<'a>,
+    bcx: &BuildContext<'a, 'cfg>,
+) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
+    // When not overridden, then the dependencies to run a build script are:
+    //
+    // 1. Compiling the build script itself.
+    // 2. For each immediate dependency of our package which has a `links`
+    //    key, the execution of that build script.
+    //
+    // We don't have a great way of handling (2) here right now so this is
+    // deferred until after the graph of all unit dependencies has been
+    // constructed.
+    let unit = new_unit(
+        bcx,
+        unit.pkg,
+        unit.target,
+        UnitFor::new_build(),
+        // Build scripts are always compiled for the host.
+        Kind::Host,
+        CompileMode::Build,
+    );
+    // All dependencies of this unit should use profiles for custom
+    // builds.
+    Ok(vec![(unit, UnitFor::new_build())])
+}
+
+/// Returns the dependencies necessary to document a package.
+fn compute_deps_doc<'a, 'cfg, 'tmp>(
+    unit: &Unit<'a>,
+    state: &mut State<'a, 'cfg, 'tmp>,
+) -> CargoResult<Vec<(Unit<'a>, UnitFor)>> {
+    let bcx = state.cx.bcx;
+    let deps = bcx
+        .resolve
+        .deps(unit.pkg.package_id())
+        .filter(|&(_id, deps)| {
+            deps.iter().any(|dep| match dep.kind() {
+                DepKind::Normal => bcx.dep_platform_activated(dep, unit.kind),
+                _ => false,
+            })
+        });
+
+    // To document a library, we depend on dependencies actually being
+    // built. If we're documenting *all* libraries, then we also depend on
+    // the documentation of the library being built.
+    let mut ret = Vec::new();
+    for (id, _deps) in deps {
+        let dep = match state.get(id)? {
+            Some(dep) => dep,
+            None => continue,
+        };
+        let lib = match dep.targets().iter().find(|t| t.is_lib()) {
+            Some(lib) => lib,
+            None => continue,
+        };
+        // Rustdoc only needs rmeta files for regular dependencies.
+        // However, for plugins/proc macros, deps should be built like normal.
+        let mode = check_or_build_mode(unit.mode, lib);
+        let dep_unit_for = UnitFor::new_normal().with_for_host(lib.for_host());
+        let lib_unit = new_unit(bcx, dep, lib, dep_unit_for, unit.kind.for_target(lib), mode);
+        ret.push((lib_unit, dep_unit_for));
+        if let CompileMode::Doc { deps: true } = unit.mode {
+            // Document this lib as well.
+            let doc_unit = new_unit(
+                bcx,
+                dep,
+                lib,
+                dep_unit_for,
+                unit.kind.for_target(lib),
+                unit.mode,
+            );
+            ret.push((doc_unit, dep_unit_for));
+        }
+    }
+
+    // Be sure to build/run the build script for documented libraries.
+    ret.extend(dep_build_script(unit, bcx));
+
+    // If we document a binary/example, we need the library available.
+ if unit.target.is_bin() || unit.target.is_example() { + ret.extend(maybe_lib(unit, bcx, UnitFor::new_normal())); + } + Ok(ret) +} + +fn maybe_lib<'a>( + unit: &Unit<'a>, + bcx: &BuildContext<'a, '_>, + unit_for: UnitFor, +) -> Option<(Unit<'a>, UnitFor)> { + unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| { + let mode = check_or_build_mode(unit.mode, t); + let unit = new_unit(bcx, unit.pkg, t, unit_for, unit.kind.for_target(t), mode); + (unit, unit_for) + }) +} + +/// If a build script is scheduled to be run for the package specified by +/// `unit`, this function will return the unit to run that build script. +/// +/// Overriding a build script simply means that the running of the build +/// script itself doesn't have any dependencies, so even in that case a unit +/// of work is still returned. `None` is only returned if the package has no +/// build script. +fn dep_build_script<'a>( + unit: &Unit<'a>, + bcx: &BuildContext<'a, '_>, +) -> Option<(Unit<'a>, UnitFor)> { + unit.pkg + .targets() + .iter() + .find(|t| t.is_custom_build()) + .map(|t| { + // The profile stored in the Unit is the profile for the thing + // the custom build script is running for. + let unit = bcx.units.intern( + unit.pkg, + t, + bcx.profiles.get_profile_run_custom_build(&unit.profile), + unit.kind, + CompileMode::RunCustomBuild, + ); + + (unit, UnitFor::new_build()) + }) +} + +/// Choose the correct mode for dependencies. +fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode { + match mode { + CompileMode::Check { .. } | CompileMode::Doc { .. } => { + if target.for_host() { + // Plugin and proc macro targets should be compiled like + // normal. + CompileMode::Build + } else { + // Regular dependencies should not be checked with --test. + // Regular dependencies of doc targets should emit rmeta only. + CompileMode::Check { test: false } + } + } + _ => CompileMode::Build, + } +} + +fn new_unit<'a>( + bcx: &BuildContext<'a, '_>, + pkg: &'a Package, + target: &'a Target, + unit_for: UnitFor, + kind: Kind, + mode: CompileMode, +) -> Unit<'a> { + let profile = bcx.profiles.get_profile( + pkg.package_id(), + bcx.ws.is_member(pkg), + unit_for, + mode, + bcx.build_config.release, + ); + + bcx.units.intern(pkg, target, profile, kind, mode) +} + +/// Fill in missing dependencies for units of the `RunCustomBuild` +/// +/// As mentioned above in `compute_deps_custom_build` each build script +/// execution has two dependencies. The first is compiling the build script +/// itself (already added) and the second is that all crates the package of the +/// build script depends on with `links` keys, their build script execution. (a +/// bit confusing eh?) +/// +/// Here we take the entire `deps` map and add more dependencies from execution +/// of one build script to execution of another build script. +fn connect_run_custom_build_deps(state: &mut State<'_, '_, '_>) { + let mut new_deps = Vec::new(); + + { + // First up build a reverse dependency map. This is a mapping of all + // `RunCustomBuild` known steps to the unit which depends on them. For + // example a library might depend on a build script, so this map will + // have the build script as the key and the library would be in the + // value's set. 
+        let mut reverse_deps = HashMap::new();
+        for (unit, deps) in state.cx.unit_dependencies.iter() {
+            for dep in deps {
+                if dep.mode == CompileMode::RunCustomBuild {
+                    reverse_deps
+                        .entry(dep)
+                        .or_insert_with(HashSet::new)
+                        .insert(unit);
+                }
+            }
+        }
+
+        // Next, we take a look at all build script executions listed in the
+        // dependency map. Our job here is to take everything that depends on
+        // this build script (from our reverse map above) and look at the other
+        // package dependencies of these parents.
+        //
+        // If we depend on a linkable target and the build script mentions
+        // `links`, then we depend on that package's build script! Here we use
+        // `dep_build_script` to manufacture an appropriate build script unit to
+        // depend on.
+        for unit in state
+            .cx
+            .unit_dependencies
+            .keys()
+            .filter(|k| k.mode == CompileMode::RunCustomBuild)
+        {
+            let reverse_deps = match reverse_deps.get(unit) {
+                Some(set) => set,
+                None => continue,
+            };
+
+            let to_add = reverse_deps
+                .iter()
+                .flat_map(|reverse_dep| state.cx.unit_dependencies[reverse_dep].iter())
+                .filter(|other| {
+                    other.pkg != unit.pkg
+                        && other.target.linkable()
+                        && other.pkg.manifest().links().is_some()
+                })
+                .filter_map(|other| dep_build_script(other, state.cx.bcx).map(|p| p.0))
+                .collect::<HashSet<_>>();
+
+            if !to_add.is_empty() {
+                new_deps.push((*unit, to_add));
+            }
+        }
+    }
+
+    // And finally, add in all the missing dependencies!
+    for (unit, new_deps) in new_deps {
+        state
+            .cx
+            .unit_dependencies
+            .get_mut(&unit)
+            .unwrap()
+            .extend(new_deps);
+    }
+}
+
+/// Records the list of units which are required to emit metadata.
+///
+/// Units which depend only on the metadata of others require the others to
+/// actually produce metadata, so we'll record that here.
fn record_units_requiring_metadata(cx: &mut Context<'_, '_>) {
+    for (key, deps) in cx.unit_dependencies.iter() {
+        for dep in deps {
+            if cx.only_requires_rmeta(key, dep) {
+                cx.rmeta_required.insert(*dep);
+            }
+        }
+    }
+}
+
+impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> {
+    fn get(&mut self, id: PackageId) -> CargoResult<Option<&'a Package>> {
+        if let Some(pkg) = self.cx.package_cache.get(&id) {
+            return Ok(Some(pkg));
+        }
+        if !self.waiting_on_download.insert(id) {
+            return Ok(None);
+        }
+        if let Some(pkg) = self.downloads.start(id)? {
+            self.cx.package_cache.insert(id, pkg);
+            self.waiting_on_download.remove(&id);
+            return Ok(Some(pkg));
+        }
+        Ok(None)
+    }
+
+    /// Completes at least one download, maybe waiting for more to complete.
+    ///
+    /// This function will block the current thread waiting for at least one
+    /// crate to finish downloading. The function may continue to download more
+    /// crates if it looks like there's a long enough queue of crates to keep
+    /// downloading. When only a handful of packages remain this function
+    /// returns, and it's hoped that by returning we'll be able to push more
+    /// packages to download into the queue.
+    fn finish_some_downloads(&mut self) -> CargoResult<()> {
+        assert!(self.downloads.remaining() > 0);
+        loop {
+            let pkg = self.downloads.wait()?;
+            self.waiting_on_download.remove(&pkg.package_id());
+            self.cx.package_cache.insert(pkg.package_id(), pkg);
+
+            // Arbitrarily choose that 5 or more packages concurrently downloading
+            // is a good enough number to "fill the network pipe". If we have
+            // less than this let's recompute the whole unit dependency graph
+            // again and try to find some more packages to download.
+            if self.downloads.remaining() < 5 {
+                break;
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs
new file mode 100644
index 00000000000..f991e961b3e
--- /dev/null
+++ b/src/cargo/core/compiler/custom_build.rs
@@ -0,0 +1,732 @@
+use std::collections::hash_map::{Entry, HashMap};
+use std::collections::{BTreeSet, HashSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::str;
+use std::sync::{Arc, Mutex};
+
+use crate::core::compiler::job_queue::JobState;
+use crate::core::PackageId;
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::machine_message::{self, Message};
+use crate::util::Cfg;
+use crate::util::{self, internal, paths, profile};
+
+use super::job::{Freshness, Job, Work};
+use super::{fingerprint, Context, Kind, TargetConfig, Unit};
+
+/// Contains the parsed output of a custom build script.
+#[derive(Clone, Debug, Hash)]
+pub struct BuildOutput {
+    /// Paths to pass to rustc with the `-L` flag.
+    pub library_paths: Vec<PathBuf>,
+    /// Names and link kinds of libraries, suitable for the `-l` flag.
+    pub library_links: Vec<String>,
+    /// Linker arguments suitable to be passed to `-C link-arg=`.
+    pub linker_args: Vec<String>,
+    /// Various `--cfg` flags to pass to the compiler.
+    pub cfgs: Vec<String>,
+    /// Additional environment variables to run the compiler with.
+    pub env: Vec<(String, String)>,
+    /// Metadata to pass to the immediate dependencies.
+    pub metadata: Vec<(String, String)>,
+    /// Paths to trigger a rerun of this build script.
+    /// May be absolute or relative paths (relative to package root).
+    pub rerun_if_changed: Vec<PathBuf>,
+    /// Environment variables which, when changed, will cause a rebuild.
+    pub rerun_if_env_changed: Vec<String>,
+    /// Warnings generated by this build.
+    pub warnings: Vec<String>,
+}
+
+/// Map of packages to build info.
+pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
+
+/// Build info and overrides.
+pub struct BuildState {
+    pub outputs: Mutex<BuildMap>,
+    overrides: HashMap<(String, Kind), BuildOutput>,
+}
+
+#[derive(Default)]
+pub struct BuildScripts {
+    // Cargo will use this `to_link` vector to add `-L` flags to compiles as we
+    // propagate them upwards towards the final build. Note, however, that we
+    // need to preserve the ordering of `to_link` to be topologically sorted.
+    // This will ensure that build scripts which print their paths properly will
+    // correctly pick up the files they generated (if there are duplicates
+    // elsewhere).
+    //
+    // To preserve this ordering, the (id, kind) is stored in two places, once
+    // in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
+    // this as we're building interactively below to ensure that the memory
+    // usage here doesn't blow up too much.
+    //
+    // For more information, see #2354.
+    pub to_link: Vec<(PackageId, Kind)>,
+    seen_to_link: HashSet<(PackageId, Kind)>,
+    pub plugins: BTreeSet<PackageId>,
+}
+
+#[derive(Debug)]
+pub struct BuildDeps {
+    pub build_script_output: PathBuf,
+    pub rerun_if_changed: Vec<PathBuf>,
+    pub rerun_if_env_changed: Vec<String>,
+}
+
+/// Prepares a `Work` that executes the target as a custom build script.
+///
+/// The unit given is the requirement which this run of the build script will
+/// prepare work for. If the requirement is specified as both the target and the
+/// host platforms it is assumed that the two are equal and the build script is
+/// only run once (not twice).
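+///
+/// Note that if the build script for this unit has been overridden (see
+/// `BuildState::overrides`), no work to actually run the script is created;
+/// only the fingerprint bookkeeping is prepared.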
+pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {
+    let _p = profile::start(format!(
+        "build script prepare: {}/{}",
+        unit.pkg,
+        unit.target.name()
+    ));
+
+    let key = (unit.pkg.package_id(), unit.kind);
+
+    if cx.build_script_overridden.contains(&key) {
+        fingerprint::prepare_target(cx, unit, false)
+    } else {
+        build_work(cx, unit)
+    }
+}
+
+fn emit_build_output(state: &JobState<'_>, output: &BuildOutput, package_id: PackageId) {
+    let library_paths = output
+        .library_paths
+        .iter()
+        .map(|l| l.display().to_string())
+        .collect::<Vec<_>>();
+
+    let msg = machine_message::BuildScript {
+        package_id,
+        linked_libs: &output.library_links,
+        linked_paths: &library_paths,
+        cfgs: &output.cfgs,
+        env: &output.env,
+    }
+    .to_json_string();
+    state.stdout(msg);
+}
+
+fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {
+    assert!(unit.mode.is_run_custom_build());
+    let bcx = &cx.bcx;
+    let dependencies = cx.dep_targets(unit);
+    let build_script_unit = dependencies
+        .iter()
+        .find(|d| !d.mode.is_run_custom_build() && d.target.is_custom_build())
+        .expect("running a script not depending on an actual script");
+    let script_dir = cx.files().build_script_dir(build_script_unit);
+    let script_out_dir = cx.files().build_script_out_dir(unit);
+    let script_run_dir = cx.files().build_script_run_dir(unit);
+    let build_plan = bcx.build_config.build_plan;
+    let invocation_name = unit.buildkey();
+
+    if let Some(deps) = unit.pkg.manifest().metabuild() {
+        prepare_metabuild(cx, build_script_unit, deps)?;
+    }
+
+    // Building the command to execute.
+    let to_exec = script_dir.join(unit.target.name());
+
+    // Start preparing the process to execute, starting out with some
+    // environment variables. Note that the profile-related environment
+    // variables are not set from the build script's own profile but rather from
+    // the package's library profile.
+    // NOTE: if you add any profile flags, be sure to update
+    // `Profiles::get_profile_run_custom_build` so that those flags get
+    // carried over.
+    let to_exec = to_exec.into_os_string();
+    let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
+    let debug = unit.profile.debuginfo.unwrap_or(0) != 0;
+    cmd.env("OUT_DIR", &script_out_dir)
+        .env("CARGO_MANIFEST_DIR", unit.pkg.root())
+        .env("NUM_JOBS", &bcx.jobs().to_string())
+        .env(
+            "TARGET",
+            &match unit.kind {
+                Kind::Host => bcx.host_triple(),
+                Kind::Target => bcx.target_triple(),
+            },
+        )
+        .env("DEBUG", debug.to_string())
+        .env("OPT_LEVEL", &unit.profile.opt_level.to_string())
+        .env(
+            "PROFILE",
+            if bcx.build_config.release {
+                "release"
+            } else {
+                "debug"
+            },
+        )
+        .env("HOST", &bcx.host_triple())
+        .env("RUSTC", &bcx.rustc.path)
+        .env("RUSTDOC", &*bcx.config.rustdoc()?)
+        .inherit_jobserver(&cx.jobserver);
+
+    if let Some(ref linker) = bcx.target_config.linker {
+        cmd.env("RUSTC_LINKER", linker);
+    }
+
+    if let Some(links) = unit.pkg.manifest().links() {
+        cmd.env("CARGO_MANIFEST_LINKS", links);
+    }
+
+    // Be sure to pass along all enabled features for this package, this is the
+    // last piece of statically known information that we have.
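+    //
+    // For example (hypothetical feature name): with a feature `foo-bar`
+    // enabled, the script's environment contains `CARGO_FEATURE_FOO_BAR=1`.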
+    for feat in bcx.resolve.features(unit.pkg.package_id()).iter() {
+        cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat.0)), "1");
+    }
+
+    let mut cfg_map = HashMap::new();
+    for cfg in bcx.cfg(unit.kind) {
+        match *cfg {
+            Cfg::Name(ref n) => {
+                cfg_map.insert(n.clone(), None);
+            }
+            Cfg::KeyPair(ref k, ref v) => {
+                if let Some(ref mut values) =
+                    *cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new()))
+                {
+                    values.push(v.clone())
+                }
+            }
+        }
+    }
+    for (k, v) in cfg_map {
+        let k = format!("CARGO_CFG_{}", super::envify(&k));
+        match v {
+            Some(list) => {
+                cmd.env(&k, list.join(","));
+            }
+            None => {
+                cmd.env(&k, "");
+            }
+        }
+    }
+
+    // Gather the set of native dependencies that this package has along with
+    // some other variables to close over.
+    //
+    // This information will be used at build-time later on to figure out which
+    // sorts of variables need to be discovered at that time.
+    let lib_deps = {
+        dependencies
+            .iter()
+            .filter_map(|unit| {
+                if unit.mode.is_run_custom_build() {
+                    Some((
+                        unit.pkg.manifest().links().unwrap().to_string(),
+                        unit.pkg.package_id(),
+                    ))
+                } else {
+                    None
+                }
+            })
+            .collect::<Vec<_>>()
+    };
+    let pkg_name = unit.pkg.to_string();
+    let build_state = Arc::clone(&cx.build_state);
+    let id = unit.pkg.package_id();
+    let output_file = script_run_dir.join("output");
+    let err_file = script_run_dir.join("stderr");
+    let root_output_file = script_run_dir.join("root-output");
+    let host_target_root = cx.files().host_root().to_path_buf();
+    let all = (
+        id,
+        pkg_name.clone(),
+        Arc::clone(&build_state),
+        output_file.clone(),
+        script_out_dir.clone(),
+    );
+    let build_scripts = super::load_build_deps(cx, unit);
+    let kind = unit.kind;
+    let json_messages = bcx.build_config.emit_json();
+    let extra_verbose = bcx.config.extra_verbose();
+    let (prev_output, prev_script_out_dir) = prev_build_output(cx, unit);
+
+    fs::create_dir_all(&script_dir)?;
+    fs::create_dir_all(&script_out_dir)?;
+
+    // Prepare the unit of "dirty work" which will actually run the custom build
+    // command.
+    //
+    // Note that this has to do some extra work just before running the command
+    // to determine extra environment variables and such.
+    let dirty = Work::new(move |state| {
+        // Make sure that OUT_DIR exists.
+        //
+        // If we have an old build directory, then just move it into place,
+        // otherwise create it!
+        if fs::metadata(&script_out_dir).is_err() {
+            fs::create_dir(&script_out_dir).chain_err(|| {
+                internal(
+                    "failed to create script output directory for \
+                     build command",
+                )
+            })?;
+        }
+
+        // For all our native lib dependencies, pick up their metadata to pass
+        // along to this custom build command. We're also careful to augment our
+        // dynamic library search path in case the build script depended on any
+        // native dynamic libraries.
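+        //
+        // For example (hypothetical package): a dependency with `links = "z"`
+        // whose build script printed `cargo:include=/some/dir` shows up here
+        // as `DEP_Z_INCLUDE=/some/dir` in this script's environment.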
+ if !build_plan { + let build_state = build_state.outputs.lock().unwrap(); + for (name, id) in lib_deps { + let key = (id, kind); + let state = build_state.get(&key).ok_or_else(|| { + internal(format!( + "failed to locate build state for env \ + vars: {}/{:?}", + id, kind + )) + })?; + let data = &state.metadata; + for &(ref key, ref value) in data.iter() { + cmd.env( + &format!("DEP_{}_{}", super::envify(&name), super::envify(key)), + value, + ); + } + } + if let Some(build_scripts) = build_scripts { + super::add_plugin_deps(&mut cmd, &build_state, &build_scripts, &host_target_root)?; + } + } + + if build_plan { + state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new())); + return Ok(()); + } + + // And now finally, run the build command itself! + state.running(&cmd); + let timestamp = paths::set_invocation_time(&script_run_dir)?; + let prefix = format!("[{} {}] ", id.name(), id.version()); + let output = cmd + .exec_with_streaming( + &mut |stdout| { + if extra_verbose { + state.stdout(format!("{}{}", prefix, stdout)); + } + Ok(()) + }, + &mut |stderr| { + if extra_verbose { + state.stderr(format!("{}{}", prefix, stderr)); + } + Ok(()) + }, + true, + ) + .chain_err(|| format!("failed to run custom build command for `{}`", pkg_name))?; + + // After the build command has finished running, we need to be sure to + // remember all of its output so we can later discover precisely what it + // was, even if we don't run the build command again (due to freshness). + // + // This is also the location where we provide feedback into the build + // state informing what variables were discovered via our script as + // well. + paths::write(&output_file, &output.stdout)?; + filetime::set_file_times(output_file, timestamp, timestamp)?; + paths::write(&err_file, &output.stderr)?; + paths::write(&root_output_file, util::path2bytes(&script_out_dir)?)?; + let parsed_output = + BuildOutput::parse(&output.stdout, &pkg_name, &script_out_dir, &script_out_dir)?; + + if json_messages { + emit_build_output(state, &parsed_output, id); + } + build_state.insert(id, kind, parsed_output); + Ok(()) + }); + + // Now that we've prepared our work-to-do, we need to prepare the fresh work + // itself to run when we actually end up just discarding what we calculated + // above. + let fresh = Work::new(move |state| { + let (id, pkg_name, build_state, output_file, script_out_dir) = all; + let output = match prev_output { + Some(output) => output, + None => BuildOutput::parse_file( + &output_file, + &pkg_name, + &prev_script_out_dir, + &script_out_dir, + )?, + }; + + if json_messages { + emit_build_output(state, &output, id); + } + + build_state.insert(id, kind, output); + Ok(()) + }); + + let mut job = if cx.bcx.build_config.build_plan { + Job::new(Work::noop(), Freshness::Dirty) + } else { + fingerprint::prepare_target(cx, unit, false)? 
+    };
+    if job.freshness() == Freshness::Dirty {
+        job.before(dirty);
+    } else {
+        job.before(fresh);
+    }
+    Ok(job)
+}
+
+impl BuildState {
+    pub fn new(host_config: &TargetConfig, target_config: &TargetConfig) -> BuildState {
+        let mut overrides = HashMap::new();
+        let i1 = host_config.overrides.iter().map(|p| (p, Kind::Host));
+        let i2 = target_config.overrides.iter().map(|p| (p, Kind::Target));
+        for ((name, output), kind) in i1.chain(i2) {
+            overrides.insert((name.clone(), kind), output.clone());
+        }
+        BuildState {
+            outputs: Mutex::new(HashMap::new()),
+            overrides,
+        }
+    }
+
+    fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
+        self.outputs.lock().unwrap().insert((id, kind), output);
+    }
+}
+
+impl BuildOutput {
+    pub fn parse_file(
+        path: &Path,
+        pkg_name: &str,
+        script_out_dir_when_generated: &Path,
+        script_out_dir: &Path,
+    ) -> CargoResult<BuildOutput> {
+        let contents = paths::read_bytes(path)?;
+        BuildOutput::parse(
+            &contents,
+            pkg_name,
+            script_out_dir_when_generated,
+            script_out_dir,
+        )
+    }
+
+    // Parses the output of a script.
+    // The `pkg_name` is used for error messages.
+    pub fn parse(
+        input: &[u8],
+        pkg_name: &str,
+        script_out_dir_when_generated: &Path,
+        script_out_dir: &Path,
+    ) -> CargoResult<BuildOutput> {
+        let mut library_paths = Vec::new();
+        let mut library_links = Vec::new();
+        let mut linker_args = Vec::new();
+        let mut cfgs = Vec::new();
+        let mut env = Vec::new();
+        let mut metadata = Vec::new();
+        let mut rerun_if_changed = Vec::new();
+        let mut rerun_if_env_changed = Vec::new();
+        let mut warnings = Vec::new();
+        let whence = format!("build script of `{}`", pkg_name);
+
+        for line in input.split(|b| *b == b'\n') {
+            let line = match str::from_utf8(line) {
+                Ok(line) => line.trim(),
+                Err(..) => continue,
+            };
+            let mut iter = line.splitn(2, ':');
+            if iter.next() != Some("cargo") {
+                // Skip this line since it doesn't start with "cargo:".
+                continue;
+            }
+            let data = match iter.next() {
+                Some(val) => val,
+                None => continue,
+            };
+
+            // Getting the `key=value` part of the line.
+            let mut iter = data.splitn(2, '=');
+            let key = iter.next();
+            let value = iter.next();
+            let (key, value) = match (key, value) {
+                (Some(a), Some(b)) => (a, b.trim_end()),
+                // Line started with `cargo:` but didn't match `key=value`.
+                _ => failure::bail!("Wrong output in {}: `{}`", whence, line),
+            };
+
+            // This will rewrite paths if the target directory has been moved.
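+            //
+            // For example, a `cargo:rustc-link-search` path recorded under the
+            // old target directory is rewritten to the equivalent path under
+            // the current one.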
+            let value = value.replace(
+                script_out_dir_when_generated.to_str().unwrap(),
+                script_out_dir.to_str().unwrap(),
+            );
+
+            match key {
+                "rustc-flags" => {
+                    let (paths, links) = BuildOutput::parse_rustc_flags(&value, &whence)?;
+                    library_links.extend(links.into_iter());
+                    library_paths.extend(paths.into_iter());
+                }
+                "rustc-link-lib" => library_links.push(value.to_string()),
+                "rustc-link-search" => library_paths.push(PathBuf::from(value)),
+                "rustc-cdylib-link-arg" => linker_args.push(value.to_string()),
+                "rustc-cfg" => cfgs.push(value.to_string()),
+                "rustc-env" => env.push(BuildOutput::parse_rustc_env(&value, &whence)?),
+                "warning" => warnings.push(value.to_string()),
+                "rerun-if-changed" => rerun_if_changed.push(PathBuf::from(value)),
+                "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()),
+                _ => metadata.push((key.to_string(), value.to_string())),
+            }
+        }
+
+        Ok(BuildOutput {
+            library_paths,
+            library_links,
+            linker_args,
+            cfgs,
+            env,
+            metadata,
+            rerun_if_changed,
+            rerun_if_env_changed,
+            warnings,
+        })
+    }
+
+    pub fn parse_rustc_flags(
+        value: &str,
+        whence: &str,
+    ) -> CargoResult<(Vec<PathBuf>, Vec<String>)> {
+        let value = value.trim();
+        let mut flags_iter = value
+            .split(|c: char| c.is_whitespace())
+            .filter(|w| w.chars().any(|c| !c.is_whitespace()));
+        let (mut library_paths, mut library_links) = (Vec::new(), Vec::new());
+        while let Some(flag) = flags_iter.next() {
+            if flag != "-l" && flag != "-L" {
+                failure::bail!(
+                    "Only `-l` and `-L` flags are allowed in {}: `{}`",
+                    whence,
+                    value
+                )
+            }
+            let value = match flags_iter.next() {
+                Some(v) => v,
+                None => failure::bail!(
+                    "Flag in rustc-flags has no value in {}: `{}`",
+                    whence,
+                    value
+                ),
+            };
+            match flag {
+                "-l" => library_links.push(value.to_string()),
+                "-L" => library_paths.push(PathBuf::from(value)),
+
+                // This was already checked above.
+                _ => failure::bail!("only -l and -L flags are allowed"),
+            };
+        }
+        Ok((library_paths, library_links))
+    }
+
+    pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> {
+        let mut iter = value.splitn(2, '=');
+        let name = iter.next();
+        let val = iter.next();
+        match (name, val) {
+            (Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())),
+            _ => failure::bail!("Variable rustc-env has no value in {}: {}", whence, value),
+        }
+    }
+}
+
+fn prepare_metabuild<'a, 'cfg>(
+    cx: &Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+    deps: &[String],
+) -> CargoResult<()> {
+    let mut output = Vec::new();
+    let available_deps = cx.dep_targets(unit);
+    // Filter out optional dependencies, and look up the actual lib name.
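+    //
+    // The source generated below (for hypothetical metabuild deps `a` and `b`)
+    // comes out as:
+    //
+    //     use a;
+    //     use b;
+    //     fn main() {
+    //         a::metabuild();
+    //         b::metabuild();
+    //     }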
+    let meta_deps: Vec<_> = deps
+        .iter()
+        .filter_map(|name| {
+            available_deps
+                .iter()
+                .find(|u| u.pkg.name().as_str() == name.as_str())
+                .map(|dep| dep.target.crate_name())
+        })
+        .collect();
+    for dep in &meta_deps {
+        output.push(format!("use {};\n", dep));
+    }
+    output.push("fn main() {\n".to_string());
+    for dep in &meta_deps {
+        output.push(format!("    {}::metabuild();\n", dep));
+    }
+    output.push("}\n".to_string());
+    let output = output.join("");
+    let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
+    fs::create_dir_all(path.parent().unwrap())?;
+    paths::write_if_changed(path, &output)?;
+    Ok(())
+}
+
+impl BuildDeps {
+    pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps {
+        BuildDeps {
+            build_script_output: output_file.to_path_buf(),
+            rerun_if_changed: output
+                .map(|p| &p.rerun_if_changed)
+                .cloned()
+                .unwrap_or_default(),
+            rerun_if_env_changed: output
+                .map(|p| &p.rerun_if_env_changed)
+                .cloned()
+                .unwrap_or_default(),
+        }
+    }
+}
+
+/// Computes the `build_scripts` map in the `Context` which tracks what build
+/// scripts each package depends on.
+///
+/// The global `build_scripts` map lists for all (package, kind) tuples what set
+/// of packages' build script outputs must be considered. For example this lists
+/// all dependencies' `-L` flags which need to be propagated transitively.
+///
+/// The given set of targets to this function is the initial set of
+/// targets/profiles which are being built.
+pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {
+    let mut ret = HashMap::new();
+    for unit in units {
+        build(&mut ret, cx, unit)?;
+    }
+    cx.build_scripts
+        .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
+    return Ok(());
+
+    // Recursive function to build up the map we're constructing. This function
+    // memoizes all of its return values as it goes along.
+    fn build<'a, 'b, 'cfg>(
+        out: &'a mut HashMap<Unit<'b>, BuildScripts>,
+        cx: &mut Context<'b, 'cfg>,
+        unit: &Unit<'b>,
+    ) -> CargoResult<&'a BuildScripts> {
+        // Do a quick pre-flight check to see if we've already calculated the
+        // set of dependencies.
+        if out.contains_key(unit) {
+            return Ok(&out[unit]);
+        }
+
+        let key = unit
+            .pkg
+            .manifest()
+            .links()
+            .map(|l| (l.to_string(), unit.kind));
+        let build_state = &cx.build_state;
+        if let Some(output) = key.and_then(|k| build_state.overrides.get(&k)) {
+            let key = (unit.pkg.package_id(), unit.kind);
+            cx.build_script_overridden.insert(key);
+            build_state
+                .outputs
+                .lock()
+                .unwrap()
+                .insert(key, output.clone());
+        }
+
+        let mut ret = BuildScripts::default();
+
+        if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
+            add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
+        }
+
+        if unit.mode.is_run_custom_build() {
+            parse_previous_explicit_deps(cx, unit)?;
+        }
+
+        // We want to invoke the compiler deterministically to be cache-friendly
+        // to rustc invocation caching schemes, so be sure to generate the same
+        // set of build script dependency orderings via sorting the targets that
+        // come out of the `Context`.
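+        //
+        // (Sorting by package id below gives a stable, deterministic ordering
+        // across runs.)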
+        let mut targets = cx.dep_targets(unit);
+        targets.sort_by_key(|u| u.pkg.package_id());
+
+        for unit in targets.iter() {
+            let dep_scripts = build(out, cx, unit)?;
+
+            if unit.target.for_host() {
+                ret.plugins
+                    .extend(dep_scripts.to_link.iter().map(|p| &p.0).cloned());
+            } else if unit.target.linkable() {
+                for &(pkg, kind) in dep_scripts.to_link.iter() {
+                    add_to_link(&mut ret, pkg, kind);
+                }
+            }
+        }
+
+        match out.entry(*unit) {
+            Entry::Vacant(entry) => Ok(entry.insert(ret)),
+            Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"),
+        }
+    }
+
+    // When adding an entry to 'to_link' we only actually push it on if the
+    // script hasn't seen it yet (e.g., we don't push on duplicates).
+    fn add_to_link(scripts: &mut BuildScripts, pkg: PackageId, kind: Kind) {
+        if scripts.seen_to_link.insert((pkg, kind)) {
+            scripts.to_link.push((pkg, kind));
+        }
+    }
+
+    fn parse_previous_explicit_deps<'a, 'cfg>(
+        cx: &mut Context<'a, 'cfg>,
+        unit: &Unit<'a>,
+    ) -> CargoResult<()> {
+        let script_run_dir = cx.files().build_script_run_dir(unit);
+        let output_file = script_run_dir.join("output");
+        let (prev_output, _) = prev_build_output(cx, unit);
+        let deps = BuildDeps::new(&output_file, prev_output.as_ref());
+        cx.build_explicit_deps.insert(*unit, deps);
+        Ok(())
+    }
+}
+
+/// Returns the previous parsed `BuildOutput`, if any, from a previous
+/// execution.
+///
+/// Also returns the directory containing the output, typically used later in
+/// processing.
+fn prev_build_output<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> (Option<BuildOutput>, PathBuf) {
+    let script_out_dir = cx.files().build_script_out_dir(unit);
+    let script_run_dir = cx.files().build_script_run_dir(unit);
+    let root_output_file = script_run_dir.join("root-output");
+    let output_file = script_run_dir.join("output");
+
+    let prev_script_out_dir = paths::read_bytes(&root_output_file)
+        .and_then(|bytes| util::bytes2path(&bytes))
+        .unwrap_or_else(|_| script_out_dir.clone());
+
+    (
+        BuildOutput::parse_file(
+            &output_file,
+            &unit.pkg.to_string(),
+            &prev_script_out_dir,
+            &script_out_dir,
+        )
+        .ok(),
+        prev_script_out_dir,
+    )
+}
diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs
new file mode 100644
index 00000000000..3ce3c04a0f4
--- /dev/null
+++ b/src/cargo/core/compiler/fingerprint.rs
@@ -0,0 +1,1629 @@
+//! # Fingerprints
+//!
+//! This module implements change-tracking so that Cargo can know whether or
+//! not something needs to be recompiled. A Cargo `Unit` can be either "dirty"
+//! (needs to be recompiled) or "fresh" (it does not need to be recompiled).
+//! There are several mechanisms that influence a Unit's freshness:
+//!
+//! - The `Metadata` hash isolates each Unit on the filesystem by being
+//!   embedded in the filename. If something in the hash changes, then the
+//!   output files will be missing, and the Unit will be dirty (missing
+//!   outputs are considered "dirty").
+//! - The `Fingerprint` is another hash, saved to the filesystem in the
+//!   `.fingerprint` directory, that tracks information about the inputs to a
+//!   Unit. If any of the inputs changes from the last compilation, then the
+//!   Unit is considered dirty. A missing fingerprint (such as during the
+//!   first build) is also considered dirty.
+//! - Whether or not input files are actually present. For example a build
+//!   script which says it depends on a nonexistent file `foo` is always rerun.
+//! - Propagation throughout the dependency graph of file modification time
+//!   information, used to detect changes on the filesystem. Each `Fingerprint`
+//!   keeps track of what files it'll be processing, and when necessary it will
+//!   check the `mtime` of each file (last modification time) and compare it to
+//!   dependencies and output to see if files have been changed or if a change
+//!   needs to force recompiles of downstream dependencies.
+//!
+//! Note: Fingerprinting is not a perfect solution. Filesystem mtime tracking
+//! is notoriously imprecise and problematic. Only a small part of the
+//! environment is captured. This is a balance of performance, simplicity, and
+//! completeness. Sandboxing, hashing file contents, tracking every file
+//! access, environment variable, and network operation would ensure more
+//! reliable and reproducible builds at the cost of being complex, slow, and
+//! platform-dependent.
+//!
+//! ## Fingerprints and Metadata
+//!
+//! Fingerprints and Metadata are similar, and track some of the same things.
+//! The Metadata contains information that is required to keep Units separate.
+//! The Fingerprint includes additional information that should cause a
+//! recompile, but it is desired to reuse the same filenames. Generally the
+//! items in the Metadata do not need to be in the Fingerprint. A comparison
+//! of what is tracked:
+//!
+//! Value                                       | Fingerprint | Metadata
+//! --------------------------------------------|-------------|----------
+//! rustc                                       | ✓           | ✓
+//! Profile                                     | ✓           | ✓
+//! `cargo rustc` extra args                    | ✓           | ✓
+//! CompileMode                                 | ✓           | ✓
+//! Target Name                                 | ✓           | ✓
+//! Target Kind (bin/lib/etc.)                  | ✓           | ✓
+//! Enabled Features                            | ✓           | ✓
+//! Immediate dependency’s hashes               | ✓[^1]       | ✓
+//! Target or Host mode                         |             | ✓
+//! __CARGO_DEFAULT_LIB_METADATA[^4]            |             | ✓
+//! package_id                                  |             | ✓
+//! authors, description, homepage, repo        | ✓           |
+//! Target src path                             | ✓           |
+//! Target path relative to ws                  | ✓           |
+//! Target flags (test/bench/for_host/edition)  | ✓           |
+//! -C incremental=… flag                       | ✓           |
+//! mtime of sources                            | ✓[^3]       |
+//! RUSTFLAGS/RUSTDOCFLAGS                      | ✓           | ✓
+//!
+//! [^1]: Build script and bin dependencies are not included.
+//!
+//! [^3]: The mtime is only tracked for workspace members and path
+//!       dependencies. Git dependencies track the git revision.
+//!
+//! [^4]: `__CARGO_DEFAULT_LIB_METADATA` is set by rustbuild to embed the
+//!       release channel (bootstrap/stable/beta/nightly) in libstd.
+//!
+//! ## Fingerprint files
+//!
+//! Fingerprint information is stored in the
+//! `target/{debug,release}/.fingerprint/` directory. Each Unit is stored in a
+//! separate directory. Each Unit directory contains:
+//!
+//! - A file with a 16 hex-digit hash. This is the Fingerprint hash, used for
+//!   quick loading and comparison.
+//! - A `.json` file that contains details about the Fingerprint. This is only
+//!   used to log details about *why* a fingerprint is considered dirty.
+//!   `CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build` can be
+//!   used to display this log information.
+//! - A "dep-info" file which contains a list of source filenames for the
+//!   target. This is produced by reading the output of `rustc
+//!   --emit=dep-info` and packing it into a condensed format. Cargo uses this
+//!   to check the mtime of every file to see if any of them have changed.
+//! - An `invoked.timestamp` file whose filesystem mtime is updated every time
+//!   the Unit is built. This is an experimental feature used for cleaning
+//!   unused artifacts.
+//!
+//! Note that some units are a little different. A Unit for *running* a build
+//! script or for `rustdoc` does not have a dep-info file (it's not
+//! applicable). Build script `invoked.timestamp` files are in the build
+//! output directory.
+//!
+//! ## Fingerprint calculation
+//!
+//! After the list of Units has been calculated, the Units are added to the
+//! `JobQueue`. As each one is added, the fingerprint is calculated, and the
+//! dirty/fresh status is recorded. A closure is used to update the fingerprint
+//! on-disk when the Unit successfully finishes. The closure will recompute the
+//! Fingerprint based on the updated information. If the Unit fails to compile,
+//! the fingerprint is not updated.
+//!
+//! Fingerprints are cached in the `Context`. This makes computing
+//! Fingerprints faster, but also is necessary for properly updating
+//! dependency information. Since a Fingerprint includes the Fingerprints of
+//! all dependencies, when it is updated, by using `Arc` clones, it
+//! automatically picks up the updates to its dependencies.
+//!
+//! ## Considerations for inclusion in a fingerprint
+//!
+//! Over time we've realized a few items which historically were included in
+//! fingerprint hashings should not actually be included. Examples are:
+//!
+//! * Modification time values. We strive to never include a modification time
+//!   inside a `Fingerprint` to get hashed into an actual value. While
+//!   theoretically fine to do, in practice this causes issues with common
+//!   applications like Docker. Docker, after a layer is built, will zero out
+//!   the nanosecond part of all filesystem modification times. This means that
+//!   the actual modification time is different for all build artifacts, which
+//!   if we tracked the actual values of modification times would cause
+//!   unnecessary recompiles. To fix this we instead only track paths which are
+//!   relevant. These paths are checked dynamically to see if they're up to
+//!   date, and the modification time doesn't make its way into the fingerprint
+//!   hash.
+//!
+//! * Absolute path names. We strive to maintain a property where if you rename
+//!   a project directory Cargo will continue to preserve all build artifacts
+//!   and reuse the cache. This means that we can't ever hash an absolute path
+//!   name. Instead we always hash relative path names and the "root" is passed
+//!   in at runtime dynamically. Some of this is best effort, but the general
+//!   idea is that we assume all accesses within a crate stay within that
+//!   crate.
+//!
+//! These are pretty tricky to test for, unfortunately, but we should have a
+//! good test suite nowadays and lord knows Cargo gets enough testing in the
+//! wild!
+//!
+//! ## Build scripts
+//!
+//! The *running* of a build script (`CompileMode::RunCustomBuild`) is treated
+//! significantly differently from all other Unit kinds. It has its own function
+//! for calculating the Fingerprint (`calculate_run_custom_build`) and has some
+//! unique considerations. It does not track the same information as a normal
+//! Unit. The information tracked depends on the `rerun-if-changed` and
+//! `rerun-if-env-changed` statements produced by the build script. If the
+//! script does not emit either of these statements, the Fingerprint runs in
+//! "old style" mode where an mtime change of *any* file in the package will
+//! cause the build script to be re-run. Otherwise, the fingerprint *only*
+//! tracks the individual "rerun-if" items listed by the build script.
+//!
+//! The "rerun-if" statements from a *previous* build are stored in the build
+//! output directory in a file called `output`. Cargo parses this file when
+//! the Unit for that build script is prepared for the `JobQueue`. The
+//! Fingerprint code can then use that information to compute the Fingerprint
+//! and compare against the old fingerprint hash.
+//!
+//! Care must be taken with build script Fingerprints because the
+//! `Fingerprint::local` value may be changed after the build script runs
+//! (such as if the build script adds or removes "rerun-if" items).
+//!
+//! Another complication is if a build script is overridden. In that case, the
+//! fingerprint is the hash of the output of the override.
+//!
+//! ## Special considerations
+//!
+//! Registry dependencies do not track the mtime of files. This is because
+//! registry dependencies are not expected to change (if a new version is
+//! used, the Package ID will change, causing a rebuild). Cargo currently
+//! partially works with Docker caching. When a Docker image is built, it has
+//! normal mtime information. However, when a step is cached, the nanosecond
+//! portions of all files are zeroed out. Currently this works, but care must
+//! be taken for situations like these.
+//!
+//! HFS on macOS only supports 1 second timestamps. This causes a significant
+//! number of problems, particularly with Cargo's testsuite which does rapid
+//! builds in succession. Other filesystems have various degrees of
+//! resolution.
+//!
+//! Various weird filesystems (such as network filesystems) also can cause
+//! complications. Network filesystems may track the time on the server
+//! (except when the time is set manually such as with
+//! `filetime::set_file_times`). Not all filesystems support modifying the
+//! mtime.
+//!
+//! See the `A-rebuild-detection` label on the issue tracker for more:
+//! <https://github.com/rust-lang/cargo/issues?q=is%3Aissue+is%3Aopen+label%3AA-rebuild-detection>
+
+use std::collections::hash_map::{Entry, HashMap};
+use std::env;
+use std::fs;
+use std::hash::{self, Hasher};
+use std::path::{Path, PathBuf};
+use std::sync::{Arc, Mutex};
+use std::time::SystemTime;
+
+use failure::{bail, format_err};
+use filetime::FileTime;
+use log::{debug, info};
+use serde::de;
+use serde::ser;
+use serde::{Deserialize, Serialize};
+
+use crate::core::Package;
+use crate::util;
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::paths;
+use crate::util::{internal, profile};
+
+use super::custom_build::BuildDeps;
+use super::job::{
+    Freshness::{Dirty, Fresh},
+    Job, Work,
+};
+use super::{BuildContext, Context, FileFlavor, Unit};
+
+/// Determines if a `unit` is up-to-date, and if not prepares necessary work to
+/// update the persisted fingerprint.
+///
+/// This function will inspect `unit`, calculate a fingerprint for it, and then
+/// return an appropriate `Job` to run. The returned `Job` will be a noop if
+/// `unit` is considered "fresh", or if it was previously built and cached.
+/// Otherwise the `Job` returned will write out the true fingerprint to the
+/// filesystem, to be executed after the unit's work has completed.
+///
+/// The `force` flag is a way to force the `Job` to be "dirty", or always
+/// update the fingerprint. **Beware using this flag** because it does not
+/// transitively propagate throughout the dependency graph, it only forces this
+/// one unit which is very unlikely to be what you want unless you're
+/// exclusively talking about top-level units.
+pub fn prepare_target<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+    force: bool,
+) -> CargoResult<Job> {
+    let _p = profile::start(format!(
+        "fingerprint: {} / {}",
+        unit.pkg.package_id(),
+        unit.target.name()
+    ));
+    let bcx = cx.bcx;
+    let new = cx.files().fingerprint_dir(unit);
+    let loc = new.join(&filename(cx, unit));
+
+    debug!("fingerprint at: {}", loc.display());
+
+    // Figure out if this unit is up to date. After calculating the fingerprint
+    // compare it to an old version, if any, and attempt to print diagnostic
+    // information about failed comparisons to aid in debugging.
+    let fingerprint = calculate(cx, unit)?;
+    let mtime_on_use = cx.bcx.config.cli_unstable().mtime_on_use;
+    let compare = compare_old_fingerprint(&loc, &*fingerprint, mtime_on_use);
+    log_compare(unit, &compare);
+
+    // If our comparison failed (e.g., we're going to trigger a rebuild of this
+    // crate), then we also ensure the source of the crate passes all
+    // verification checks before we build it.
+    //
+    // The `Source::verify` method is intended to allow sources to execute
+    // pre-build checks to ensure that the relevant source code is all
+    // up-to-date and as expected. This is currently used primarily for
+    // directory sources which will use this hook to perform an integrity check
+    // on all files in the source to ensure they haven't changed. If they have
+    // changed then an error is issued.
+    if compare.is_err() {
+        let source_id = unit.pkg.package_id().source_id();
+        let sources = bcx.packages.sources();
+        let source = sources
+            .get(source_id)
+            .ok_or_else(|| internal("missing package source"))?;
+        source.verify(unit.pkg.package_id())?;
+    }
+
+    if compare.is_ok() && !force {
+        return Ok(Job::new(Work::noop(), Fresh));
+    }
+
+    let write_fingerprint = if unit.mode.is_run_custom_build() {
+        // For build scripts the `local` field of the fingerprint may change
+        // while we're executing it. For example it could be in the legacy
+        // "consider everything a dependency" mode and then we switch to "deps
+        // are explicitly specified" mode.
+        //
+        // To handle this movement we need to regenerate the `local` field of a
+        // build script's fingerprint after it's executed. We do this by
+        // using the `build_script_local_fingerprints` function which returns a
+        // thunk we can invoke on a foreign thread to calculate this.
+        let state = Arc::clone(&cx.build_state);
+        let key = (unit.pkg.package_id(), unit.kind);
+        let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit);
+        let output_path = cx.build_explicit_deps[unit].build_script_output.clone();
+        Work::new(move |_| {
+            let outputs = state.outputs.lock().unwrap();
+            let outputs = &outputs[&key];
+            let deps = BuildDeps::new(&output_path, Some(outputs));
+
+            // FIXME: it's basically buggy that we pass `None` to `call_box`
+            // here. See documentation on `build_script_local_fingerprints`
+            // below for more information. Despite this just try to proceed and
+            // hobble along if it happens to return `Some`.
+            if let Some(new_local) = (gen_local)(&deps, None)? {
+                *fingerprint.local.lock().unwrap() = new_local;
+                *fingerprint.memoized_hash.lock().unwrap() = None;
+            }
+
+            write_fingerprint(&loc, &fingerprint)
+        })
+    } else {
+        Work::new(move |_| write_fingerprint(&loc, &fingerprint))
+    };
+
+    Ok(Job::new(write_fingerprint, Dirty))
+}
+
+/// Dependency edge information for fingerprints. This is generated for each
+/// unit in `dep_targets` and is stored in a `Fingerprint` below.
+#[derive(Clone)]
+struct DepFingerprint {
+    /// The hash of the package ID that this dependency points to.
+    pkg_id: u64,
+    /// The crate name we're using for this dependency, which if we change we'll
+    /// need to recompile!
+    name: String,
+    /// Whether or not this dependency is flagged as a public dependency or not.
+    public: bool,
+    /// Whether or not this dependency is an rmeta dependency or a "full"
+    /// dependency. In the case of an rmeta dependency our dependency edge only
+    /// actually requires the rmeta from what we depend on, so when checking
+    /// mtime information all files other than the rmeta can be ignored.
+    only_requires_rmeta: bool,
+    /// The dependency's fingerprint we recursively point to, containing all the
+    /// other hash information we'd otherwise need.
+    fingerprint: Arc<Fingerprint>,
+}
+
+/// A fingerprint can be considered to be a "short string" representing the
+/// state of a world for a package.
+///
+/// If a fingerprint ever changes, then the package itself needs to be
+/// recompiled. Inputs to the fingerprint include source code modifications,
+/// compiler flags, compiler version, etc. This structure is not simply a
+/// `String` due to the fact that some fingerprints cannot be calculated lazily.
+///
+/// Path sources, for example, use the mtime of the corresponding dep-info file
+/// as a fingerprint (all source files must be modified *before* this mtime).
+/// This dep-info file is not generated, however, until after the crate is
+/// compiled. As a result, this structure can be thought of as a fingerprint
+/// to-be. The actual value can be calculated via `hash()`, but the operation
+/// may fail as some files may not have been generated.
+///
+/// Note that dependencies are taken into account for fingerprints because rustc
+/// requires that whenever an upstream crate is recompiled that all downstream
+/// dependants are also recompiled. This is typically tracked through
+/// `DependencyQueue`, but it also needs to be retained here because Cargo can
+/// be interrupted while executing, losing the state of the `DependencyQueue`
+/// graph.
+#[derive(Serialize, Deserialize)]
+pub struct Fingerprint {
+    /// Hash of the version of `rustc` used.
+    rustc: u64,
+    /// Sorted list of cfg features enabled.
+    features: String,
+    /// Hash of the `Target` struct, including the target name,
+    /// package-relative source path, edition, etc.
+    target: u64,
+    /// Hash of the `Profile`, `CompileMode`, and any extra flags passed via
+    /// `cargo rustc` or `cargo rustdoc`.
+    profile: u64,
+    /// Hash of the path to the base source file. This is relative to the
+    /// workspace root for path members, or absolute for other sources.
+    path: u64,
+    /// Fingerprints of dependencies.
+    deps: Vec<DepFingerprint>,
+    /// Information about the inputs that affect this Unit (such as source
+    /// file mtimes or build script environment variables).
+    local: Mutex<Vec<LocalFingerprint>>,
+    /// Cached hash of the `Fingerprint` struct. Used to improve performance
+    /// for hashing.
+    #[serde(skip)]
+    memoized_hash: Mutex<Option<u64>>,
+    /// RUSTFLAGS/RUSTDOCFLAGS environment variable value (or config value).
+    rustflags: Vec<String>,
+    /// Hash of some metadata from the manifest, such as "authors", or
+    /// "description", which are exposed as environment variables during
+    /// compilation.
+    metadata: u64,
+    /// Description of whether the filesystem status for this unit is up to date
+    /// or should be considered stale.
+    #[serde(skip)]
+    fs_status: FsStatus,
+    /// Files, relative to `target_root`, that are produced by the step that
+    /// this `Fingerprint` represents. This is used to detect when the whole
+    /// fingerprint is out of date if this is missing, or if previous
+    /// fingerprints output files are regenerated and look newer than this one.
+    #[serde(skip)]
+    outputs: Vec<PathBuf>,
+}
+
+/// Indication of the status on the filesystem for a particular unit.
+enum FsStatus {
+    /// This unit is to be considered stale, even if hash information all
+    /// matches. The filesystem inputs have changed (or are missing) and the
+    /// unit needs to subsequently be recompiled.
+    Stale,
+
+    /// This unit is up-to-date. All outputs and their corresponding mtime are
+    /// listed in the payload here for other dependencies to compare against.
+    UpToDate { mtimes: HashMap<PathBuf, FileTime> },
+}
+
+impl FsStatus {
+    fn up_to_date(&self) -> bool {
+        match self {
+            FsStatus::UpToDate { .. } => true,
+            FsStatus::Stale => false,
+        }
+    }
+}
+
+impl Default for FsStatus {
+    fn default() -> FsStatus {
+        FsStatus::Stale
+    }
+}
+
+impl Serialize for DepFingerprint {
+    fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        (
+            &self.pkg_id,
+            &self.name,
+            &self.public,
+            &self.fingerprint.hash(),
+        )
+            .serialize(ser)
+    }
+}
+
+impl<'de> Deserialize<'de> for DepFingerprint {
+    fn deserialize<D>(d: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        let (pkg_id, name, public, hash) = <(u64, String, bool, u64)>::deserialize(d)?;
+        Ok(DepFingerprint {
+            pkg_id,
+            name,
+            public,
+            fingerprint: Arc::new(Fingerprint {
+                memoized_hash: Mutex::new(Some(hash)),
+                ..Fingerprint::new()
+            }),
+            // This field is never read since it's only used in
+            // `check_filesystem`, which isn't used by fingerprints loaded from
+            // disk.
+            only_requires_rmeta: false,
+        })
+    }
+}
+
+/// A `LocalFingerprint` represents something that we use to detect direct
+/// changes to a `Fingerprint`.
+///
+/// This is where we track file information, env vars, etc. This
+/// `LocalFingerprint` struct is hashed and if the hash changes will force a
+/// recompile of any fingerprint it's included into. Note that the "local"
+/// terminology comes from the fact that it only has to do with one crate, and
+/// `Fingerprint` tracks the transitive propagation of fingerprint changes.
+///
+/// Note that because this is hashed its contents are carefully managed. Like
+/// mentioned in the above module docs, we don't want to hash absolute paths or
+/// mtime information.
+///
+/// Also note that a `LocalFingerprint` is used in `check_filesystem` to detect
+/// when the filesystem contains stale information (based on mtime currently).
+/// The paths here don't change much between compilations but they're used as
+/// inputs when we probe the filesystem looking at information.
+#[derive(Debug, Serialize, Deserialize, Hash)]
+enum LocalFingerprint {
+    /// This is a precalculated fingerprint which has an opaque string we just
+    /// hash as usual. This variant is primarily used for git/crates.io
+    /// dependencies where the source never changes so we can quickly conclude
+    /// that there's some string we can hash and it won't really change much.
+    ///
+    /// This is also used for build scripts with no `rerun-if-*` statements, but
+    /// that's overall a mistake and causes bugs in Cargo. We shouldn't use this
+    /// for build scripts.
+    Precalculated(String),
+
+    /// This is used for crate compilations. The `dep_info` file is a relative
+
+/// A `LocalFingerprint` represents something that we use to detect direct
+/// changes to a `Fingerprint`.
+///
+/// This is where we track file information, env vars, etc. This
+/// `LocalFingerprint` struct is hashed and if the hash changes will force a
+/// recompile of any fingerprint it's included into. Note that the "local"
+/// terminology comes from the fact that it only has to do with one crate, and
+/// `Fingerprint` tracks the transitive propagation of fingerprint changes.
+///
+/// Note that because this is hashed its contents are carefully managed. Like
+/// mentioned in the above module docs, we don't want to hash absolute paths or
+/// mtime information.
+///
+/// Also note that a `LocalFingerprint` is used in `check_filesystem` to detect
+/// when the filesystem contains stale information (based on mtime currently).
+/// The paths here don't change much between compilations but they're used as
+/// inputs when we probe the filesystem looking at information.
+#[derive(Debug, Serialize, Deserialize, Hash)]
+enum LocalFingerprint {
+    /// This is a precalculated fingerprint which has an opaque string we just
+    /// hash as usual. This variant is primarily used for git/crates.io
+    /// dependencies where the source never changes so we can quickly conclude
+    /// that there's some string we can hash and it won't really change much.
+    ///
+    /// This is also used for build scripts with no `rerun-if-*` statements, but
+    /// that's overall a mistake and causes bugs in Cargo. We shouldn't use this
+    /// for build scripts.
+    Precalculated(String),
+
+    /// This is used for crate compilations. The `dep_info` file is a relative
+    /// path anchored at `target_root(...)` to the dep-info file that Cargo
+    /// generates (which is a custom serialization after parsing rustc's own
+    /// `dep-info` output).
+    ///
+    /// The `dep_info` file, when present, also lists a number of other files
+    /// for us to look at. If any of those files are newer than this file then
+    /// we need to recompile.
+    CheckDepInfo { dep_info: PathBuf },
+
+    /// This represents a nonempty set of `rerun-if-changed` annotations printed
+    /// out by a build script. The `output` file is a relative path anchored at
+    /// `target_root(...)` which is the actual output of the build script. That
+    /// output has already been parsed and the paths printed out via
+    /// `rerun-if-changed` are listed in `paths`. The `paths` field is relative
+    /// to `pkg.root()`.
+    ///
+    /// This is considered up-to-date if all of the `paths` are older than
+    /// `output`, otherwise we need to recompile.
+    RerunIfChanged {
+        output: PathBuf,
+        paths: Vec<PathBuf>,
+    },
+
+    /// This represents a single `rerun-if-env-changed` annotation printed by a
+    /// build script. The exact env var and value are hashed here. There's no
+    /// filesystem dependence here, and if the values are changed the hash will
+    /// change, forcing a recompile.
+    RerunIfEnvChanged { var: String, val: Option<String> },
+}
+
+enum StaleFile {
+    Missing(PathBuf),
+    Changed {
+        reference: PathBuf,
+        reference_mtime: FileTime,
+        stale: PathBuf,
+        stale_mtime: FileTime,
+    },
+}
+
+impl LocalFingerprint {
+    /// Checks dynamically at runtime if this `LocalFingerprint` has a stale
+    /// file.
+    ///
+    /// This will use the absolute root paths passed in if necessary to guide
+    /// file accesses.
+    fn find_stale_file(
+        &self,
+        mtime_cache: &mut HashMap<PathBuf, FileTime>,
+        pkg_root: &Path,
+        target_root: &Path,
+    ) -> CargoResult<Option<StaleFile>> {
+        match self {
+            // We need to parse `dep_info`, learn about all the files the crate
+            // depends on, and then see if any of them are newer than the
+            // dep_info file itself. If the `dep_info` file is missing then this
+            // unit has never been compiled!
+            LocalFingerprint::CheckDepInfo { dep_info } => {
+                let dep_info = target_root.join(dep_info);
+                if let Some(paths) = parse_dep_info(pkg_root, target_root, &dep_info)? {
+                    Ok(find_stale_file(mtime_cache, &dep_info, paths.iter()))
+                } else {
+                    Ok(Some(StaleFile::Missing(dep_info)))
+                }
+            }
+
+            // We need to verify that no paths listed in `paths` are newer than
+            // the `output` path itself, or the last time the build script ran.
+            LocalFingerprint::RerunIfChanged { output, paths } => Ok(find_stale_file(
+                mtime_cache,
+                &target_root.join(output),
+                paths.iter().map(|p| pkg_root.join(p)),
+            )),
+
+            // These have no dependencies on the filesystem, and their values
+            // are included natively in the `Fingerprint` hash so nothing to
+            // check for here.
+            LocalFingerprint::RerunIfEnvChanged { .. } => Ok(None),
+            LocalFingerprint::Precalculated(..) => Ok(None),
+        }
+    }
+
+    fn kind(&self) -> &'static str {
+        match self {
+            LocalFingerprint::Precalculated(..) => "precalculated",
+            LocalFingerprint::CheckDepInfo { .. } => "dep-info",
+            LocalFingerprint::RerunIfChanged { .. } => "rerun-if-changed",
+            LocalFingerprint::RerunIfEnvChanged { ..
} => "rerun-if-env-changed", + } + } +} + +#[derive(Debug)] +struct MtimeSlot(Mutex>); + +impl Fingerprint { + fn new() -> Fingerprint { + Fingerprint { + rustc: 0, + target: 0, + profile: 0, + path: 0, + features: String::new(), + deps: Vec::new(), + local: Mutex::new(Vec::new()), + memoized_hash: Mutex::new(None), + rustflags: Vec::new(), + metadata: 0, + fs_status: FsStatus::Stale, + outputs: Vec::new(), + } + } + + fn hash(&self) -> u64 { + if let Some(s) = *self.memoized_hash.lock().unwrap() { + return s; + } + let ret = util::hash_u64(self); + *self.memoized_hash.lock().unwrap() = Some(ret); + ret + } + + /// Compares this fingerprint with an old version which was previously + /// serialized to filesystem. + /// + /// The purpose of this is exclusively to produce a diagnostic message + /// indicating why we're recompiling something. This function always returns + /// an error, it will never return success. + fn compare(&self, old: &Fingerprint) -> CargoResult<()> { + if self.rustc != old.rustc { + bail!("rust compiler has changed") + } + if self.features != old.features { + bail!( + "features have changed: {} != {}", + self.features, + old.features + ) + } + if self.target != old.target { + bail!("target configuration has changed") + } + if self.path != old.path { + bail!("path to the compiler has changed") + } + if self.profile != old.profile { + bail!("profile configuration has changed") + } + if self.rustflags != old.rustflags { + bail!("RUSTFLAGS has changed") + } + if self.metadata != old.metadata { + bail!("metadata changed") + } + let my_local = self.local.lock().unwrap(); + let old_local = old.local.lock().unwrap(); + if my_local.len() != old_local.len() { + bail!("local lens changed"); + } + for (new, old) in my_local.iter().zip(old_local.iter()) { + match (new, old) { + (LocalFingerprint::Precalculated(a), LocalFingerprint::Precalculated(b)) => { + if a != b { + bail!("precalculated components have changed: {} != {}", a, b) + } + } + ( + LocalFingerprint::CheckDepInfo { dep_info: adep }, + LocalFingerprint::CheckDepInfo { dep_info: bdep }, + ) => { + if adep != bdep { + bail!("dep info output changed: {:?} != {:?}", adep, bdep) + } + } + ( + LocalFingerprint::RerunIfChanged { + output: aout, + paths: apaths, + }, + LocalFingerprint::RerunIfChanged { + output: bout, + paths: bpaths, + }, + ) => { + if aout != bout { + bail!("rerun-if-changed output changed: {:?} != {:?}", aout, bout) + } + if apaths != bpaths { + bail!( + "rerun-if-changed output changed: {:?} != {:?}", + apaths, + bpaths, + ) + } + } + ( + LocalFingerprint::RerunIfEnvChanged { + var: akey, + val: avalue, + }, + LocalFingerprint::RerunIfEnvChanged { + var: bkey, + val: bvalue, + }, + ) => { + if *akey != *bkey { + bail!("env vars changed: {} != {}", akey, bkey); + } + if *avalue != *bvalue { + bail!( + "env var `{}` changed: previously {:?} now {:?}", + akey, + bvalue, + avalue + ) + } + } + (a, b) => bail!( + "local fingerprint type has changed ({} => {})", + b.kind(), + a.kind() + ), + } + } + + if self.deps.len() != old.deps.len() { + bail!("number of dependencies has changed") + } + for (a, b) in self.deps.iter().zip(old.deps.iter()) { + if a.name != b.name { + let e = format_err!("`{}` != `{}`", a.name, b.name) + .context("unit dependency name changed"); + return Err(e.into()); + } + + if a.fingerprint.hash() != b.fingerprint.hash() { + let e = format_err!( + "new ({}/{:x}) != old ({}/{:x})", + a.name, + a.fingerprint.hash(), + b.name, + b.fingerprint.hash() + ) + .context("unit dependency information 
changed");
+                return Err(e.into());
+            }
+        }
+
+        if !self.fs_status.up_to_date() {
+            bail!("current filesystem status shows we're outdated");
+        }
+
+        // This typically means some filesystem modifications happened or
+        // something transitive was odd. In general we should strive to provide
+        // a better error message than this, so if you see this message a lot it
+        // likely means this method needs to be updated!
+        bail!("fingerprint comparison turned up nothing obvious");
+    }
+
+    /// Dynamically inspect the local filesystem to update the `fs_status` field
+    /// of this `Fingerprint`.
+    ///
+    /// This function is used just after a `Fingerprint` is constructed to check
+    /// the local state of the filesystem and propagate any dirtiness from
+    /// dependencies up to this unit as well. This function assumes that the
+    /// unit starts out as `FsStatus::Stale` and then it will optionally switch
+    /// it to `UpToDate` if it can.
+    fn check_filesystem(
+        &mut self,
+        mtime_cache: &mut HashMap<PathBuf, FileTime>,
+        pkg_root: &Path,
+        target_root: &Path,
+    ) -> CargoResult<()> {
+        assert!(!self.fs_status.up_to_date());
+
+        let mut mtimes = HashMap::new();
+
+        // Get the `mtime` of all outputs. Optionally update their mtime
+        // afterwards based on the `mtime_on_use` flag. Afterwards we want the
+        // maximum mtime as it's the one we'll be comparing to inputs and
+        // dependencies.
+        for output in self.outputs.iter() {
+            let mtime = match paths::mtime(output) {
+                Ok(mtime) => mtime,
+
+                // This path failed to report its `mtime`. It probably doesn't
+                // exist, so leave ourselves as stale and bail out.
+                Err(e) => {
+                    log::debug!("failed to get mtime of {:?}: {}", output, e);
+                    return Ok(());
+                }
+            };
+            assert!(mtimes.insert(output.clone(), mtime).is_none());
+        }
+
+        let max_mtime = match mtimes.values().max() {
+            Some(mtime) => mtime,
+
+            // We had no output files. This means we're an overridden build
+            // script and we're just always up to date because we aren't
+            // watching the filesystem.
+            None => {
+                self.fs_status = FsStatus::UpToDate { mtimes };
+                return Ok(());
+            }
+        };
+
+        for dep in self.deps.iter() {
+            let dep_mtimes = match &dep.fingerprint.fs_status {
+                FsStatus::UpToDate { mtimes } => mtimes,
+                // If our dependency is stale, so are we, so bail out.
+                FsStatus::Stale => return Ok(()),
+            };
+
+            // If our dependency edge only requires the rmeta file to be present
+            // then we only need to look at that one output file, otherwise we
+            // need to consider all output files to see if we're out of date.
+            let dep_mtime = if dep.only_requires_rmeta {
+                dep_mtimes
+                    .iter()
+                    .filter_map(|(path, mtime)| {
+                        if path.extension().and_then(|s| s.to_str()) == Some("rmeta") {
+                            Some(mtime)
+                        } else {
+                            None
+                        }
+                    })
+                    .next()
+                    .expect("failed to find rmeta")
+            } else {
+                match dep_mtimes.values().max() {
+                    Some(mtime) => mtime,
+                    // If our dependency is up to date and has no filesystem
+                    // interactions, then we can move on to the next dependency.
+                    None => continue,
+                }
+            };
+
+            // If the dependency is newer than our own output then it was
+            // recompiled previously. We transitively become stale ourselves in
+            // that case, so bail out.
+            //
+            // Note that this comparison should probably be `>=`, not `>`, but
+            // for a discussion of why it's `>` see the discussion about #5918
+            // below in `find_stale`.
+            if dep_mtime > max_mtime {
+                log::info!("dependency on `{}` is newer than we are", dep.name);
+                return Ok(());
+            }
+        }
+
+        // If we reached this far then all dependencies are up to date.
Check + // all our `LocalFingerprint` information to see if we have any stale + // files for this package itself. If we do find something log a helpful + // message and bail out so we stay stale. + for local in self.local.get_mut().unwrap().iter() { + if let Some(file) = local.find_stale_file(mtime_cache, pkg_root, target_root)? { + file.log(); + return Ok(()); + } + } + + // Everything was up to date! Record such. + self.fs_status = FsStatus::UpToDate { mtimes }; + + Ok(()) + } +} + +impl hash::Hash for Fingerprint { + fn hash(&self, h: &mut H) { + let Fingerprint { + rustc, + ref features, + target, + path, + profile, + ref deps, + ref local, + metadata, + ref rustflags, + .. + } = *self; + let local = local.lock().unwrap(); + ( + rustc, features, target, path, profile, &*local, metadata, rustflags, + ) + .hash(h); + + h.write_usize(deps.len()); + for DepFingerprint { + pkg_id, + name, + public, + fingerprint, + only_requires_rmeta: _, // static property, no need to hash + } in deps + { + pkg_id.hash(h); + name.hash(h); + public.hash(h); + // use memoized dep hashes to avoid exponential blowup + h.write_u64(Fingerprint::hash(fingerprint)); + } + } +} + +impl hash::Hash for MtimeSlot { + fn hash(&self, h: &mut H) { + self.0.lock().unwrap().hash(h) + } +} + +impl ser::Serialize for MtimeSlot { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + self.0 + .lock() + .unwrap() + .map(|ft| (ft.unix_seconds(), ft.nanoseconds())) + .serialize(s) + } +} + +impl<'de> de::Deserialize<'de> for MtimeSlot { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + let kind: Option<(i64, u32)> = de::Deserialize::deserialize(d)?; + Ok(MtimeSlot(Mutex::new( + kind.map(|(s, n)| FileTime::from_unix_time(s, n)), + ))) + } +} + +impl DepFingerprint { + fn new<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + parent: &Unit<'a>, + dep: &Unit<'a>, + ) -> CargoResult { + let fingerprint = calculate(cx, dep)?; + let name = cx.bcx.extern_crate_name(parent, dep)?; + let public = cx.bcx.is_public_dependency(parent, dep); + + // We need to be careful about what we hash here. We have a goal of + // supporting renaming a project directory and not rebuilding + // everything. To do that, however, we need to make sure that the cwd + // doesn't make its way into any hashes, and one source of that is the + // `SourceId` for `path` packages. + // + // We already have a requirement that `path` packages all have unique + // names (sort of for this same reason), so if the package source is a + // `path` then we just hash the name, but otherwise we hash the full + // id as it won't change when the directory is renamed. + let pkg_id = if dep.pkg.package_id().source_id().is_path() { + util::hash_u64(dep.pkg.package_id().name()) + } else { + util::hash_u64(dep.pkg.package_id()) + }; + + Ok(DepFingerprint { + pkg_id, + name, + public, + fingerprint, + only_requires_rmeta: cx.only_requires_rmeta(parent, dep), + }) + } +} + +impl StaleFile { + /// Use the `log` crate to log a hopefully helpful message in diagnosing + /// what file is considered stale and why. This is intended to be used in + /// conjunction with `CARGO_LOG` to determine why Cargo is recompiling + /// something. Currently there's no user-facing usage of this other than + /// that. 
+ fn log(&self) { + match self { + StaleFile::Missing(path) => { + log::info!("stale: missing {:?}", path); + } + StaleFile::Changed { + reference, + reference_mtime, + stale, + stale_mtime, + } => { + log::info!("stale: changed {:?}", stale); + log::info!(" (vs) {:?}", reference); + log::info!(" {:?} != {:?}", reference_mtime, stale_mtime); + } + } + } +} + +/// Calculates the fingerprint for a `unit`. +/// +/// This fingerprint is used by Cargo to learn about when information such as: +/// +/// * A non-path package changes (changes version, changes revision, etc). +/// * Any dependency changes +/// * The compiler changes +/// * The set of features a package is built with changes +/// * The profile a target is compiled with changes (e.g., opt-level changes) +/// * Any other compiler flags change that will affect the result +/// +/// Information like file modification time is only calculated for path +/// dependencies. +fn calculate<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> CargoResult> { + // This function is slammed quite a lot, so the result is memoized. + if let Some(s) = cx.fingerprints.get(unit) { + return Ok(Arc::clone(s)); + } + let mut fingerprint = if unit.mode.is_run_custom_build() { + calculate_run_custom_build(cx, unit)? + } else if unit.mode.is_doc_test() { + panic!("doc tests do not fingerprint"); + } else { + calculate_normal(cx, unit)? + }; + + // After we built the initial `Fingerprint` be sure to update the + // `fs_status` field of it. + let target_root = target_root(cx); + fingerprint.check_filesystem(&mut cx.mtime_cache, unit.pkg.root(), &target_root)?; + + let fingerprint = Arc::new(fingerprint); + cx.fingerprints.insert(*unit, Arc::clone(&fingerprint)); + Ok(fingerprint) +} + +/// Calculate a fingerprint for a "normal" unit, or anything that's not a build +/// script. This is an internal helper of `calculate`, don't call directly. +fn calculate_normal<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> CargoResult { + // Recursively calculate the fingerprint for all of our dependencies. + // + // Skip fingerprints of binaries because they don't actually induce a + // recompile, they're just dependencies in the sense that they need to be + // built. + let mut deps = cx + .dep_targets(unit) + .iter() + .filter(|u| !u.target.is_bin()) + .map(|dep| DepFingerprint::new(cx, unit, dep)) + .collect::>>()?; + deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); + + // Afterwards calculate our own fingerprint information. We specially + // handle `path` packages to ensure we track files on the filesystem + // correctly, but otherwise upstream packages like from crates.io or git + // get bland fingerprints because they don't change without their + // `PackageId` changing. + let target_root = target_root(cx); + let local = if use_dep_info(unit) { + let dep_info = dep_info_loc(cx, unit); + let dep_info = dep_info.strip_prefix(&target_root).unwrap().to_path_buf(); + vec![LocalFingerprint::CheckDepInfo { dep_info }] + } else { + let fingerprint = pkg_fingerprint(cx.bcx, unit.pkg)?; + vec![LocalFingerprint::Precalculated(fingerprint)] + }; + + // Figure out what the outputs of our unit is, and we'll be storing them + // into the fingerprint as well. + let outputs = cx + .outputs(unit)? 
+        .iter()
+        .filter(|output| output.flavor != FileFlavor::DebugInfo)
+        .map(|output| output.path.clone())
+        .collect();
+
+    // Fill out a bunch more information that we'll be tracking. It's typically
+    // hashed to take up less space on disk, as we just need to know when things
+    // change.
+    let extra_flags = if unit.mode.is_doc() {
+        cx.bcx.rustdocflags_args(unit)
+    } else {
+        cx.bcx.rustflags_args(unit)
+    };
+    let profile_hash = util::hash_u64((&unit.profile, unit.mode, cx.bcx.extra_args_for(unit)));
+    // Include metadata since it is exposed as environment variables.
+    let m = unit.pkg.manifest().metadata();
+    let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository));
+    Ok(Fingerprint {
+        rustc: util::hash_u64(&cx.bcx.rustc.verbose_version),
+        target: util::hash_u64(&unit.target),
+        profile: profile_hash,
+        // Note that .0 is hashed here, not .1 which is the cwd. That doesn't
+        // actually affect the output artifact so there's no need to hash it.
+        path: util::hash_u64(super::path_args(cx.bcx, unit).0),
+        features: format!(
+            "{:?}",
+            cx.bcx.resolve.features_sorted(unit.pkg.package_id())
+        ),
+        deps,
+        local: Mutex::new(local),
+        memoized_hash: Mutex::new(None),
+        metadata,
+        rustflags: extra_flags.to_vec(),
+        fs_status: FsStatus::Stale,
+        outputs,
+    })
+}
+
+/// Whether or not the fingerprint should track the dependencies from the
+/// dep-info file for this unit.
+fn use_dep_info(unit: &Unit<'_>) -> bool {
+    !unit.mode.is_doc()
+}
+
+/// Calculate a fingerprint for an "execute a build script" unit. This is an
+/// internal helper of `calculate`, don't call directly.
+fn calculate_run_custom_build<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> CargoResult<Fingerprint> {
+    // Using the `BuildDeps` information we'll have previously parsed and
+    // inserted into `build_explicit_deps`, build an initial snapshot of the
+    // `LocalFingerprint` list for this build script. If we previously executed
+    // the build script this means we'll be watching files and env vars.
+    // Otherwise if we haven't previously executed it we'll just start watching
+    // the whole crate.
+    let (gen_local, overridden) = build_script_local_fingerprints(cx, unit);
+    let deps = &cx.build_explicit_deps[unit];
+    let local = (gen_local)(deps, Some(&|| pkg_fingerprint(cx.bcx, unit.pkg)))?.unwrap();
+    let output = deps.build_script_output.clone();
+
+    // Include any dependencies of our execution, which is typically just the
+    // compilation of the build script itself (if the build script changes, we
+    // should be rerun!). Note though that if we're an overridden build script
+    // we have no dependencies, so there's no need to recurse in that case.
+    let deps = if overridden {
+        // Overridden build scripts don't need to track deps.
+        vec![]
+    } else {
+        cx.dep_targets(unit)
+            .iter()
+            .map(|dep| DepFingerprint::new(cx, unit, dep))
+            .collect::<CargoResult<Vec<_>>>()?
+    };
+
+    Ok(Fingerprint {
+        local: Mutex::new(local),
+        rustc: util::hash_u64(&cx.bcx.rustc.verbose_version),
+        deps,
+        outputs: if overridden { Vec::new() } else { vec![output] },
+
+        // Most of the other info is blank here as we don't really include it
+        // in the execution of the build script, but... this may be a latent
+        // bug in Cargo.
+        ..Fingerprint::new()
+    })
+}
+
+/// Get ready to compute the `LocalFingerprint` values for a `RunCustomBuild`
+/// unit.
+///
+/// This function has, what's on the surface, a seriously wonky interface.
+/// You'll call this function and it'll return a closure and a boolean.
The +/// boolean is pretty simple in that it indicates whether the `unit` has been +/// overridden via `.cargo/config`. The closure is much more complicated. +/// +/// This closure is intended to capture any local state necessary to compute +/// the `LocalFingerprint` values for this unit. It is `Send` and `'static` to +/// be sent to other threads as well (such as when we're executing build +/// scripts). That deduplication is the rationale for the closure at least. +/// +/// The arguments to the closure are a bit weirder, though, and I'll apologize +/// in advance for the weirdness too. The first argument to the closure is a +/// `&BuildDeps`. This is the parsed version of a build script, and when Cargo +/// starts up this is cached from previous runs of a build script. After a +/// build script executes the output file is reparsed and passed in here. +/// +/// The second argument is the weirdest, it's *optionally* a closure to +/// call `pkg_fingerprint` below. The `pkg_fingerprint` below requires access +/// to "source map" located in `Context`. That's very non-`'static` and +/// non-`Send`, so it can't be used on other threads, such as when we invoke +/// this after a build script has finished. The `Option` allows us to for sure +/// calculate it on the main thread at the beginning, and then swallow the bug +/// for now where a worker thread after a build script has finished doesn't +/// have access. Ideally there would be no second argument or it would be more +/// "first class" and not an `Option` but something that can be sent between +/// threads. In any case, it's a bug for now. +/// +/// This isn't the greatest of interfaces, and if there's suggestions to +/// improve please do so! +/// +/// FIXME(#6779) - see all the words above +fn build_script_local_fingerprints<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> ( + Box< + dyn FnOnce( + &BuildDeps, + Option<&dyn Fn() -> CargoResult>, + ) -> CargoResult>> + + Send, + >, + bool, +) { + // First up, if this build script is entirely overridden, then we just + // return the hash of what we overrode it with. This is the easy case! + if let Some(fingerprint) = build_script_override_fingerprint(cx, unit) { + debug!("override local fingerprints deps"); + return ( + Box::new( + move |_: &BuildDeps, _: Option<&dyn Fn() -> CargoResult>| { + Ok(Some(vec![fingerprint])) + }, + ), + true, // this is an overridden build script + ); + } + + // ... Otherwise this is a "real" build script and we need to return a real + // closure. Our returned closure classifies the build script based on + // whether it prints `rerun-if-*`. If it *doesn't* print this it's where the + // magical second argument comes into play, which fingerprints a whole + // package. Remember that the fact that this is an `Option` is a bug, but a + // longstanding bug, in Cargo. Recent refactorings just made it painfully + // obvious. + let pkg_root = unit.pkg.root().to_path_buf(); + let target_dir = target_root(cx); + let calculate = + move |deps: &BuildDeps, pkg_fingerprint: Option<&dyn Fn() -> CargoResult>| { + if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() { + match pkg_fingerprint { + // FIXME: this is somewhat buggy with respect to docker and + // weird filesystems. The `Precalculated` variant + // constructed below will, for `path` dependencies, contain + // a stringified version of the mtime for the local crate. + // This violates one of the things we describe in this + // module's doc comment, never hashing mtimes. 
We should
+                    // figure out a better scheme where a package fingerprint
+                    // may be a string (like for a registry) or a list of files
+                    // (like for a path dependency). That list of files would
+                    // be stored here rather than the mtime of them.
+                    Some(f) => {
+                        debug!("old local fingerprints deps");
+                        let s = f()?;
+                        return Ok(Some(vec![LocalFingerprint::Precalculated(s)]));
+                    }
+                    None => return Ok(None),
+                }
+            }
+
+            // Ok so now we're in "new mode" where we can have files listed as
+            // dependencies as well as env vars listed as dependencies. Process
+            // them all here.
+            Ok(Some(local_fingerprints_deps(deps, &target_dir, &pkg_root)))
+        };
+
+    // Note that `false` == "not overridden"
+    (Box::new(calculate), false)
+}
+
+/// Creates a `LocalFingerprint` for an overridden build script.
+/// Returns `None` if it is not overridden.
+fn build_script_override_fingerprint<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    unit: &Unit<'a>,
+) -> Option<LocalFingerprint> {
+    let state = cx.build_state.outputs.lock().unwrap();
+    let output = state.get(&(unit.pkg.package_id(), unit.kind))?;
+    let s = format!(
+        "overridden build state with hash: {}",
+        util::hash_u64(output)
+    );
+    Some(LocalFingerprint::Precalculated(s))
+}
+
+/// Compute the `LocalFingerprint` values for a `RunCustomBuild` unit for
+/// non-overridden new-style build scripts only. This is only used when `deps`
+/// is already known to have a nonempty `rerun-if-*` somewhere.
+fn local_fingerprints_deps(
+    deps: &BuildDeps,
+    target_root: &Path,
+    pkg_root: &Path,
+) -> Vec<LocalFingerprint> {
+    debug!("new local fingerprints deps");
+    let mut local = Vec::new();
+
+    if !deps.rerun_if_changed.is_empty() {
+        // Note that like the module comment above says we are careful to never
+        // store an absolute path in `LocalFingerprint`, so ensure that we strip
+        // absolute prefixes from them.
+        let output = deps
+            .build_script_output
+            .strip_prefix(target_root)
+            .unwrap()
+            .to_path_buf();
+        let paths = deps
+            .rerun_if_changed
+            .iter()
+            .map(|p| p.strip_prefix(pkg_root).unwrap_or(p).to_path_buf())
+            .collect();
+        local.push(LocalFingerprint::RerunIfChanged { output, paths });
+    }
+
+    for var in deps.rerun_if_env_changed.iter() {
+        let val = env::var(var).ok();
+        local.push(LocalFingerprint::RerunIfEnvChanged {
+            var: var.clone(),
+            val,
+        });
+    }
+
+    local
+}
+
+fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> {
+    debug_assert_ne!(fingerprint.rustc, 0);
+    // fingerprint::new().rustc == 0, make sure it doesn't make it to the file system.
+    // This is mostly so outside tools can reliably find out what rust version this file is for,
+    // as we can use the full hash.
+    let hash = fingerprint.hash();
+    debug!("write fingerprint ({:x}) : {}", hash, loc.display());
+    paths::write(loc, util::to_hex(hash).as_bytes())?;
+
+    let json = serde_json::to_string(fingerprint).unwrap();
+    if cfg!(debug_assertions) {
+        let f: Fingerprint = serde_json::from_str(&json).unwrap();
+        assert_eq!(f.hash(), hash);
+    }
+    paths::write(&loc.with_extension("json"), json.as_bytes())?;
+    Ok(())
+}
+
+/// Prepares for work when a package starts to build.
+pub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> {
+    let new1 = cx.files().fingerprint_dir(unit);
+
+    // Doc tests have no output, thus no fingerprint.
+    if !new1.exists() && !unit.mode.is_doc_test() {
+        fs::create_dir(&new1)?;
+    }
+
+    Ok(())
+}
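`write_fingerprint` above persists each fingerprint twice: a short hex hash that `compare_old_fingerprint` (further below) can check cheaply, and a JSON sidecar used to diagnose *why* something is rebuilt. A sketch of that two-file scheme, under the assumption that `to_hex` is simply a hex rendering of the `u64` (Cargo's actual helper may differ):

```rust
use std::fs;
use std::io;
use std::path::Path;

// Hypothetical stand-in for `util::to_hex`.
fn to_hex(hash: u64) -> String {
    format!("{:016x}", hash)
}

fn write_pair(loc: &Path, hash: u64, json: &str) -> io::Result<()> {
    // The short hash: cheap to read and compare on the next build...
    fs::write(loc, to_hex(hash))?;
    // ...and the JSON sidecar, holding the full structure for diagnostics.
    fs::write(loc.with_extension("json"), json)
}
```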
+
+/// Returns the location that the dep-info file will show up at for the `unit`
+/// specified.
+pub fn dep_info_loc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> PathBuf {
+    cx.files()
+        .fingerprint_dir(unit)
+        .join(&format!("dep-{}", filename(cx, unit)))
+}
+
+/// Returns the absolute path to the target directory.
+/// All paths are rewritten to be relative to this.
+fn target_root(cx: &Context<'_, '_>) -> PathBuf {
+    cx.bcx.ws.target_dir().into_path_unlocked()
+}
+
+fn compare_old_fingerprint(
+    loc: &Path,
+    new_fingerprint: &Fingerprint,
+    mtime_on_use: bool,
+) -> CargoResult<()> {
+    let old_fingerprint_short = paths::read(loc)?;
+
+    if mtime_on_use {
+        // Update the mtime so other cleaners know we used it.
+        let t = FileTime::from_system_time(SystemTime::now());
+        filetime::set_file_times(loc, t, t)?;
+    }
+
+    let new_hash = new_fingerprint.hash();
+
+    if util::to_hex(new_hash) == old_fingerprint_short && new_fingerprint.fs_status.up_to_date() {
+        return Ok(());
+    }
+
+    let old_fingerprint_json = paths::read(&loc.with_extension("json"))?;
+    let old_fingerprint: Fingerprint = serde_json::from_str(&old_fingerprint_json)
+        .chain_err(|| internal("failed to deserialize json"))?;
+    debug_assert_eq!(util::to_hex(old_fingerprint.hash()), old_fingerprint_short);
+    let result = new_fingerprint.compare(&old_fingerprint);
+    assert!(result.is_err());
+    result
+}
+
+fn log_compare(unit: &Unit<'_>, compare: &CargoResult<()>) {
+    let ce = match compare {
+        Ok(..) => return,
+        Err(e) => e,
+    };
+    info!(
+        "fingerprint error for {}/{:?}/{:?}",
+        unit.pkg, unit.mode, unit.target,
+    );
+    info!("    err: {}", ce);
+
+    for cause in ce.iter_causes() {
+        info!("  cause: {}", cause);
+    }
+}
+
+/// Parses the dep-info file into a list of paths.
+pub fn parse_dep_info(
+    pkg_root: &Path,
+    target_root: &Path,
+    dep_info: &Path,
+) -> CargoResult<Option<Vec<PathBuf>>> {
+    let data = match paths::read_bytes(dep_info) {
+        Ok(data) => data,
+        Err(_) => return Ok(None),
+    };
+    let paths = data
+        .split(|&x| x == 0)
+        .filter(|x| !x.is_empty())
+        .map(|p| {
+            let ty = match DepInfoPathType::from_byte(p[0]) {
+                Some(ty) => ty,
+                None => return Err(internal("dep-info invalid")),
+            };
+            let path = util::bytes2path(&p[1..])?;
+            match ty {
+                DepInfoPathType::PackageRootRelative => Ok(pkg_root.join(path)),
+                // N.B. path might be absolute here in which case the join will have no effect
+                DepInfoPathType::TargetRootRelative => Ok(target_root.join(path)),
+            }
+        })
+        .collect::<Result<Vec<_>, _>>()?;
+    Ok(Some(paths))
+}
+
+fn pkg_fingerprint(bcx: &BuildContext<'_, '_>, pkg: &Package) -> CargoResult<String> {
+    let source_id = pkg.package_id().source_id();
+    let sources = bcx.packages.sources();
+
+    let source = sources
+        .get(source_id)
+        .ok_or_else(|| internal("missing package source"))?;
+    source.fingerprint(pkg)
+}
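`parse_dep_info` above reads a simple binary format: one type byte (1 = package-root-relative, 2 = target-root-relative, matching `DepInfoPathType` below), then the path bytes, then a 0 terminator. A Unix-flavored sketch of that round trip (the real code goes through `util::path2bytes`/`bytes2path` to stay portable):

```rust
use std::path::{Path, PathBuf};

fn encode(entries: &[(u8, &Path)]) -> Vec<u8> {
    let mut out = Vec::new();
    for (ty, path) in entries {
        out.push(*ty); // type byte
        out.extend(path.to_str().unwrap().as_bytes()); // path bytes
        out.push(0); // terminator
    }
    out
}

fn decode(data: &[u8]) -> Vec<(u8, PathBuf)> {
    data.split(|&b| b == 0)
        .filter(|chunk| !chunk.is_empty())
        .map(|chunk| {
            let path = String::from_utf8_lossy(&chunk[1..]).into_owned();
            (chunk[0], PathBuf::from(path))
        })
        .collect()
}

fn main() {
    let data = encode(&[(1, Path::new("src/lib.rs")), (2, Path::new("deps/libfoo.rmeta"))]);
    let entries = decode(&data);
    assert_eq!(entries[0], (1, PathBuf::from("src/lib.rs")));
    assert_eq!(entries[1], (2, PathBuf::from("deps/libfoo.rmeta")));
}
```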
+
+fn find_stale_file<I>(
+    mtime_cache: &mut HashMap<PathBuf, FileTime>,
+    reference: &Path,
+    paths: I,
+) -> Option<StaleFile>
+where
+    I: IntoIterator,
+    I::Item: AsRef<Path>,
+{
+    let reference_mtime = match paths::mtime(reference) {
+        Ok(mtime) => mtime,
+        Err(..) => return Some(StaleFile::Missing(reference.to_path_buf())),
+    };
+
+    for path in paths {
+        let path = path.as_ref();
+        let path_mtime = match mtime_cache.entry(path.to_path_buf()) {
+            Entry::Occupied(o) => *o.get(),
+            Entry::Vacant(v) => {
+                let mtime = match paths::mtime(path) {
+                    Ok(mtime) => mtime,
+                    Err(..) => return Some(StaleFile::Missing(path.to_path_buf())),
+                };
+                *v.insert(mtime)
+            }
+        };
+
+        // TODO: fix #5918.
+        // Note that equal mtimes should be considered "stale". For filesystems with
+        // coarse timestamp precision, like 1s, this would be a conservative approximation
+        // to handle the case where a file is modified within the same second after
+        // a build starts. We want to make sure that incremental rebuilds pick that up!
+        //
+        // For filesystems with nanosecond precision, it's been seen in the wild that
+        // the "nanosecond precision" isn't really nanosecond-accurate. It turns out that
+        // kernels may cache the current time, so files created at different times actually
+        // end up with the same nanosecond timestamp. Some digging on #5919 picked up that the
+        // kernel caches the current time between timer ticks, which could mean that if
+        // a file is updated at most 10ms after a build starts then Cargo may not
+        // pick up the build changes.
+        //
+        // All in all, an equality check here would be a conservative assumption that,
+        // if equal, files were changed just after a previous build finished.
+        // Unfortunately this became problematic when (in #6484) Cargo switched to more
+        // accurately measuring the start time of builds.
+        if path_mtime <= reference_mtime {
+            continue;
+        }
+
+        return Some(StaleFile::Changed {
+            reference: reference.to_path_buf(),
+            reference_mtime,
+            stale: path.to_path_buf(),
+            stale_mtime: path_mtime,
+        });
+    }
+
+    None
+}
+
+fn filename<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> String {
+    // file_stem includes the metadata hash. Thus we have a different
+    // fingerprint for every metadata hash version. This works because
+    // even if the package is fresh, we'll still link the fresh target.
+    let file_stem = cx.files().file_stem(unit);
+    let kind = unit.target.kind().description();
+    let flavor = if unit.mode.is_any_test() {
+        "test-"
+    } else if unit.mode.is_doc() {
+        "doc-"
+    } else if unit.mode.is_run_custom_build() {
+        "run-"
+    } else {
+        ""
+    };
+    format!("{}{}-{}", flavor, kind, file_stem)
+}
+
+#[repr(u8)]
+enum DepInfoPathType {
+    // src/, e.g. src/lib.rs
+    PackageRootRelative = 1,
+    // target/debug/deps/lib...
+    // or an absolute path /.../sysroot/...
+    TargetRootRelative = 2,
+}
+
+impl DepInfoPathType {
+    fn from_byte(b: u8) -> Option<DepInfoPathType> {
+        match b {
+            1 => Some(DepInfoPathType::PackageRootRelative),
+            2 => Some(DepInfoPathType::TargetRootRelative),
+            _ => None,
+        }
+    }
+}
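`translate_dep_info` below classifies every path rustc reported against two roots, and the decision order matters: target-root-relative wins, then package-root-relative, and anything else is kept absolute but tagged target-root-relative, since joining an absolute path onto the target root later is a no-op. A simplified sketch of just that classification (canonicalization and the `allow_package` filter omitted; type bytes match `DepInfoPathType` above):

```rust
use std::path::{Path, PathBuf};

fn classify(file: &Path, target_root: &Path, pkg_root: &Path) -> (u8, PathBuf) {
    if let Ok(stripped) = file.strip_prefix(target_root) {
        (2, stripped.to_path_buf()) // TargetRootRelative
    } else if let Ok(stripped) = file.strip_prefix(pkg_root) {
        (1, stripped.to_path_buf()) // PackageRootRelative
    } else {
        (2, file.to_path_buf()) // absolute; re-joining to the root is a no-op
    }
}

fn main() {
    let target = Path::new("/proj/target");
    let pkg = Path::new("/proj");
    assert_eq!(classify(Path::new("/proj/src/lib.rs"), target, pkg).0, 1);
    assert_eq!(classify(Path::new("/proj/target/debug/deps/x.rmeta"), target, pkg).0, 2);
    assert_eq!(classify(Path::new("/usr/lib/libc.so"), target, pkg).0, 2);
}
```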
+
+/// Parses the dep-info file coming out of rustc into a Cargo-specific format.
+///
+/// This function will parse `rustc_dep_info` as a makefile-style dep info to
+/// learn about all the files which a crate depends on. This is then
+/// re-serialized into the `cargo_dep_info` path in a Cargo-specific format.
+///
+/// The `pkg_root` argument here is the absolute path to the directory
+/// containing `Cargo.toml` for this crate that was compiled. The paths listed
+/// in the rustc dep-info file may or may not be absolute but we'll want to
+/// consider all of them relative to the `root` specified.
+///
+/// The `rustc_cwd` argument is the absolute path to the cwd of the compiler
+/// when it was invoked.
+///
+/// If the `allow_package` argument is true, then package-relative paths are
+/// included. If it is false, then package-relative paths are skipped and
+/// ignored (typically used for registry or git dependencies where we assume
+/// the source never changes, and we don't want the cost of running `stat` on
+/// all those files).
+///
+/// The serialized Cargo format will contain a list of files, all of which are
+/// relative if they're under `root`, or absolute if they're elsewhere.
+pub fn translate_dep_info(
+    rustc_dep_info: &Path,
+    cargo_dep_info: &Path,
+    rustc_cwd: &Path,
+    pkg_root: &Path,
+    target_root: &Path,
+    allow_package: bool,
+) -> CargoResult<()> {
+    let target = parse_rustc_dep_info(rustc_dep_info)?;
+    let deps = &target
+        .get(0)
+        .ok_or_else(|| internal("malformed dep-info format, no targets".to_string()))?
+        .1;
+
+    let target_root = target_root.canonicalize()?;
+    let pkg_root = pkg_root.canonicalize()?;
+    let mut new_contents = Vec::new();
+    for file in deps {
+        // The path may be absolute or relative, canonical or not. Make sure
+        // it is canonicalized so we are comparing the same kinds of paths.
+        let canon_file = rustc_cwd.join(file).canonicalize()?;
+        let abs_file = rustc_cwd.join(file);
+
+        let (ty, path) = if let Ok(stripped) = canon_file.strip_prefix(&target_root) {
+            (DepInfoPathType::TargetRootRelative, stripped)
+        } else if let Ok(stripped) = canon_file.strip_prefix(&pkg_root) {
+            if !allow_package {
+                continue;
+            }
+            (DepInfoPathType::PackageRootRelative, stripped)
+        } else {
+            // It's definitely not target root relative, but this is an absolute path (since it was
+            // joined to rustc_cwd) and as such re-joining it later to the target root will have no
+            // effect.
+            (DepInfoPathType::TargetRootRelative, &*abs_file)
+        };
+        new_contents.push(ty as u8);
+        new_contents.extend(util::path2bytes(path)?);
+        new_contents.push(0);
+    }
+    paths::write(cargo_dep_info, &new_contents)?;
+    Ok(())
+}
+
+pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult<Vec<(String, Vec<String>)>> {
+    let contents = paths::read(rustc_dep_info)?;
+    contents
+        .lines()
+        .filter_map(|l| l.find(": ").map(|i| (l, i)))
+        .map(|(line, pos)| {
+            let target = &line[..pos];
+            let mut deps = line[pos + 2..].split_whitespace();
+
+            let mut ret = Vec::new();
+            while let Some(s) = deps.next() {
+                let mut file = s.to_string();
+                while file.ends_with('\\') {
+                    file.pop();
+                    file.push(' ');
+                    file.push_str(deps.next().ok_or_else(|| {
+                        internal("malformed dep-info format, trailing \\".to_string())
+                    })?);
+                }
+                ret.push(file);
+            }
+            Ok((target.to_string(), ret))
+        })
+        .collect()
+}
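`parse_rustc_dep_info` above undoes the one escape that matters in rustc's makefile-style `.d` output: a filename containing a space is emitted with a trailing backslash on the broken token. A self-contained sketch of that unescaping loop:

```rust
fn split_deps(line: &str) -> Option<(String, Vec<String>)> {
    let pos = line.find(": ")?;
    let target = line[..pos].to_string();
    let mut deps = line[pos + 2..].split_whitespace();

    let mut files = Vec::new();
    while let Some(s) = deps.next() {
        let mut file = s.to_string();
        // A trailing `\` means whitespace splitting broke a single filename.
        while file.ends_with('\\') {
            file.pop();
            file.push(' ');
            file.push_str(deps.next()?); // malformed input if nothing follows
        }
        files.push(file);
    }
    Some((target, files))
}

fn main() {
    let (target, files) = split_deps(r"lib.rmeta: src/lib.rs src/has\ space.rs").unwrap();
    assert_eq!(target, "lib.rmeta");
    assert_eq!(files, vec!["src/lib.rs".to_string(), "src/has space.rs".to_string()]);
}
```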
diff --git a/src/cargo/core/compiler/job.rs b/src/cargo/core/compiler/job.rs
new file mode 100644
index 00000000000..000440d2ee9
--- /dev/null
+++ b/src/cargo/core/compiler/job.rs
@@ -0,0 +1,91 @@
+use std::fmt;
+use std::mem;
+
+use super::job_queue::JobState;
+use crate::util::CargoResult;
+
+pub struct Job {
+    work: Work,
+    fresh: Freshness,
+}
+
+/// Each proc should send its description before starting.
+/// It should send either once or close immediately.
+pub struct Work {
+    inner: Box<dyn for<'a, 'b> FnBox<&'a JobState<'b>, CargoResult<()>> + Send>,
+}
+
+trait FnBox<A, R> {
+    fn call_box(self: Box<Self>, a: A) -> R;
+}
+
+impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
+    fn call_box(self: Box<F>, a: A) -> R {
+        (*self)(a)
+    }
+}
+
+impl Work {
+    pub fn new<F>(f: F) -> Work
+    where
+        F: FnOnce(&JobState<'_>) -> CargoResult<()> + Send + 'static,
+    {
+        Work { inner: Box::new(f) }
+    }
+
+    pub fn noop() -> Work {
+        Work::new(|_| Ok(()))
+    }
+
+    pub fn call(self, tx: &JobState<'_>) -> CargoResult<()> {
+        self.inner.call_box(tx)
+    }
+
+    pub fn then(self, next: Work) -> Work {
+        Work::new(move |state| {
+            self.call(state)?;
+            next.call(state)
+        })
+    }
+}
+
+impl Job {
+    /// Creates a new job representing a unit of work.
+    pub fn new(work: Work, fresh: Freshness) -> Job {
+        Job { work, fresh }
+    }
+
+    /// Consumes this job by running it, returning the result of the
+    /// computation.
+    pub fn run(self, state: &JobState<'_>) -> CargoResult<()> {
+        self.work.call(state)
+    }
+
+    /// Returns whether this job was fresh/dirty, where "fresh" means we're
+    /// likely to perform just some small bookkeeping where "dirty" means we'll
+    /// probably do something slow like invoke rustc.
+    pub fn freshness(&self) -> Freshness {
+        self.fresh
+    }
+
+    pub fn before(&mut self, next: Work) {
+        let prev = mem::replace(&mut self.work, Work::noop());
+        self.work = next.then(prev);
+    }
+}
+
+impl fmt::Debug for Job {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Job {{ ... }}")
+    }
+}
+
+/// Indication of the freshness of a package.
+///
+/// A fresh package does not necessarily need to be rebuilt (unless a dependency
+/// was also rebuilt), and a dirty package must always be rebuilt.
+#[derive(PartialEq, Eq, Debug, Clone, Copy)]
+pub enum Freshness {
+    Fresh,
+    Dirty,
+}
diff --git a/src/cargo/core/compiler/job_queue.rs b/src/cargo/core/compiler/job_queue.rs
new file mode 100644
index 00000000000..1b999be7f42
--- /dev/null
+++ b/src/cargo/core/compiler/job_queue.rs
@@ -0,0 +1,623 @@
+use std::cell::Cell;
+use std::collections::{HashMap, HashSet};
+use std::io;
+use std::marker;
+use std::sync::mpsc::{channel, Receiver, Sender};
+use std::sync::Arc;
+
+use crossbeam_utils::thread::Scope;
+use jobserver::{Acquired, HelperThread};
+use log::{debug, info, trace};
+
+use super::context::OutputFile;
+use super::job::{
+    Freshness::{self, Dirty, Fresh},
+    Job,
+};
+use super::{BuildContext, BuildPlan, CompileMode, Context, Unit};
+use crate::core::{PackageId, TargetKind};
+use crate::handle_error;
+use crate::util;
+use crate::util::diagnostic_server::{self, DiagnosticPrinter};
+use crate::util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder};
+use crate::util::{Config, DependencyQueue};
+use crate::util::{Progress, ProgressStyle};
+
+/// A management structure of the entire dependency graph to compile.
+///
+/// This structure is backed by the `DependencyQueue` type and manages the
+/// actual compilation step of each package. Packages enqueue units of work and
+/// then later on the entire graph is processed and compiled.
+pub struct JobQueue<'a, 'cfg> {
+    queue: DependencyQueue<Unit<'a>, Artifact, Job>,
+    tx: Sender<Message>,
+    rx: Receiver<Message>,
+    active: HashMap<u32, Unit<'a>>,
+    compiled: HashSet<PackageId>,
+    documented: HashSet<PackageId>,
+    counts: HashMap<PackageId, usize>,
+    is_release: bool,
+    progress: Progress<'cfg>,
+    next_id: u32,
+}
+
+pub struct JobState<'a> {
+    /// Channel back to the main thread to coordinate messages and such.
+    tx: Sender<Message>,
+
+    /// The job id that this state is associated with, used when sending
+    /// messages back to the main thread.
+    id: u32,
+
+    /// Whether or not we're expected to have a call to `rmeta_produced`. Once
+    /// that method is called this is dynamically set to `false` to prevent
+    /// sending a double message later on.
+    rmeta_required: Cell<bool>,
+
+    // Historical versions of Cargo made use of the `'a` argument here, so to
+    // leave the door open to future refactorings keep it here.
+    _marker: marker::PhantomData<&'a ()>,
+}
+
+/// Possible artifacts that can be produced by compilations, used as edge values
+/// in the dependency graph.
+///
+/// As edge values we can have multiple kinds of edges depending on one node,
+/// for example some units may only depend on the metadata for an rlib while
+/// others depend on the full rlib. This `Artifact` enum is used to distinguish
+/// this case and track the progress of compilations as they proceed.
+#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] +enum Artifact { + /// A generic placeholder for "depends on everything run by a step" and + /// means that we can't start the next compilation until the previous has + /// finished entirely. + All, + + /// A node indicating that we only depend on the metadata of a compilation, + /// but the compilation is typically also producing an rlib. We can start + /// our step, however, before the full rlib is available. + Metadata, +} + +enum Message { + Run(String), + BuildPlanMsg(String, ProcessBuilder, Arc>), + Stdout(String), + Stderr(String), + FixDiagnostic(diagnostic_server::Message), + Token(io::Result), + Finish(u32, Artifact, CargoResult<()>), +} + +impl<'a> JobState<'a> { + pub fn running(&self, cmd: &ProcessBuilder) { + let _ = self.tx.send(Message::Run(cmd.to_string())); + } + + pub fn build_plan( + &self, + module_name: String, + cmd: ProcessBuilder, + filenames: Arc>, + ) { + let _ = self + .tx + .send(Message::BuildPlanMsg(module_name, cmd, filenames)); + } + + pub fn stdout(&self, stdout: String) { + drop(self.tx.send(Message::Stdout(stdout))); + } + + pub fn stderr(&self, stderr: String) { + drop(self.tx.send(Message::Stderr(stderr))); + } + + /// A method used to signal to the coordinator thread that the rmeta file + /// for an rlib has been produced. This is only called for some rmeta + /// builds when required, and can be called at any time before a job ends. + /// This should only be called once because a metadata file can only be + /// produced once! + pub fn rmeta_produced(&self) { + assert!(self.rmeta_required.get()); + self.rmeta_required.set(false); + let _ = self + .tx + .send(Message::Finish(self.id, Artifact::Metadata, Ok(()))); + } +} + +impl<'a, 'cfg> JobQueue<'a, 'cfg> { + pub fn new(bcx: &BuildContext<'a, 'cfg>) -> JobQueue<'a, 'cfg> { + let (tx, rx) = channel(); + let progress = Progress::with_style("Building", ProgressStyle::Ratio, bcx.config); + JobQueue { + queue: DependencyQueue::new(), + tx, + rx, + active: HashMap::new(), + compiled: HashSet::new(), + documented: HashSet::new(), + counts: HashMap::new(), + is_release: bcx.build_config.release, + progress, + next_id: 0, + } + } + + pub fn enqueue( + &mut self, + cx: &Context<'a, 'cfg>, + unit: &Unit<'a>, + job: Job, + ) -> CargoResult<()> { + let dependencies = cx.dep_targets(unit); + let mut queue_deps = dependencies + .iter() + .cloned() + .filter(|unit| { + // Binaries aren't actually needed to *compile* tests, just to run + // them, so we don't include this dependency edge in the job graph. + !unit.target.is_test() || !unit.target.is_bin() + }) + .map(|dep| { + // Handle the case here where our `unit -> dep` dependency may + // only require the metadata, not the full compilation to + // finish. Use the tables in `cx` to figure out what kind + // of artifact is associated with this dependency. + let artifact = if cx.only_requires_rmeta(unit, &dep) { + Artifact::Metadata + } else { + Artifact::All + }; + (dep, artifact) + }) + .collect::>(); + + // This is somewhat tricky, but we may need to synthesize some + // dependencies for this target if it requires full upstream + // compilations to have completed. If we're in pipelining mode then some + // dependency edges may be `Metadata` due to the above clause (as + // opposed to everything being `All`). For example consider: + // + // a (binary) + // └ b (lib) + // └ c (lib) + // + // Here the dependency edge from B to C will be `Metadata`, and the + // dependency edge from A to B will be `All`. 
For A to be compiled, + // however, it currently actually needs the full rlib of C. This means + // that we need to synthesize a dependency edge for the dependency graph + // from A to C. That's done here. + // + // This will walk all dependencies of the current target, and if any of + // *their* dependencies are `Metadata` then we depend on the `All` of + // the target as well. This should ensure that edges changed to + // `Metadata` propagate upwards `All` dependencies to anything that + // transitively contains the `Metadata` edge. + if unit.requires_upstream_objects() { + for dep in dependencies { + depend_on_deps_of_deps(cx, &mut queue_deps, dep); + } + + fn depend_on_deps_of_deps<'a>( + cx: &Context<'a, '_>, + deps: &mut HashMap, Artifact>, + unit: Unit<'a>, + ) { + for dep in cx.dep_targets(&unit) { + if deps.insert(dep, Artifact::All).is_none() { + depend_on_deps_of_deps(cx, deps, dep); + } + } + } + } + + self.queue.queue(*unit, job, queue_deps); + *self.counts.entry(unit.pkg.package_id()).or_insert(0) += 1; + Ok(()) + } + + /// Executes all jobs necessary to build the dependency graph. + /// + /// This function will spawn off `config.jobs()` workers to build all of the + /// necessary dependencies, in order. Freshness is propagated as far as + /// possible along each dependency chain. + pub fn execute(&mut self, cx: &mut Context<'a, '_>, plan: &mut BuildPlan) -> CargoResult<()> { + let _p = profile::start("executing the job graph"); + self.queue.queue_finished(); + + // Create a helper thread for acquiring jobserver tokens + let tx = self.tx.clone(); + let helper = cx + .jobserver + .clone() + .into_helper_thread(move |token| { + drop(tx.send(Message::Token(token))); + }) + .chain_err(|| "failed to create helper thread for jobserver management")?; + + // Create a helper thread to manage the diagnostics for rustfix if + // necessary. + let tx = self.tx.clone(); + let _diagnostic_server = cx + .bcx + .build_config + .rustfix_diagnostic_server + .borrow_mut() + .take() + .map(move |srv| srv.start(move |msg| drop(tx.send(Message::FixDiagnostic(msg))))); + + // Use `crossbeam` to create a scope in which we can execute scoped + // threads. Note that this isn't currently required by Cargo but it was + // historically required. This is left in for now in case we need the + // `'a` ability for child threads in the near future, but if this + // comment has been sitting here for a long time feel free to refactor + // away crossbeam. + crossbeam_utils::thread::scope(|scope| self.drain_the_queue(cx, plan, scope, &helper)) + .expect("child threads shouldn't panic") + } + + fn drain_the_queue( + &mut self, + cx: &mut Context<'a, '_>, + plan: &mut BuildPlan, + scope: &Scope<'a>, + jobserver_helper: &HelperThread, + ) -> CargoResult<()> { + let mut tokens = Vec::new(); + let mut queue = Vec::new(); + let mut print = DiagnosticPrinter::new(cx.bcx.config); + trace!("queue: {:#?}", self.queue); + + // Iteratively execute the entire dependency graph. Each turn of the + // loop starts out by scheduling as much work as possible (up to the + // maximum number of parallel jobs we have tokens for). A local queue + // is maintained separately from the main dependency queue as one + // dequeue may actually dequeue quite a bit of work (e.g., 10 binaries + // in one package). + // + // After a job has finished we update our internal state if it was + // successful and otherwise wait for pending work to finish if it failed + // and then immediately return. 
+ let mut error = None; + let total = self.queue.len(); + let mut finished = 0; + loop { + // Dequeue as much work as we can, learning about everything + // possible that can run. Note that this is also the point where we + // start requesting job tokens. Each job after the first needs to + // request a token. + while let Some((unit, job)) = self.queue.dequeue() { + queue.push((unit, job)); + if self.active.len() + queue.len() > 1 { + jobserver_helper.request_token(); + } + } + + // Now that we've learned of all possible work that we can execute + // try to spawn it so long as we've got a jobserver token which says + // we're able to perform some parallel work. + while error.is_none() && self.active.len() < tokens.len() + 1 && !queue.is_empty() { + let (unit, job) = queue.remove(0); + self.run(&unit, job, cx, scope)?; + } + + // If after all that we're not actually running anything then we're + // done! + if self.active.is_empty() { + break; + } + + // And finally, before we block waiting for the next event, drop any + // excess tokens we may have accidentally acquired. Due to how our + // jobserver interface is architected we may acquire a token that we + // don't actually use, and if this happens just relinquish it back + // to the jobserver itself. + tokens.truncate(self.active.len() - 1); + + // Drain all events at once to avoid displaying the progress bar + // unnecessarily. + let events: Vec<_> = self.rx.try_iter().collect(); + let events = if events.is_empty() { + self.show_progress(finished, total); + vec![self.rx.recv().unwrap()] + } else { + events + }; + + for event in events { + match event { + Message::Run(cmd) => { + cx.bcx + .config + .shell() + .verbose(|c| c.status("Running", &cmd))?; + } + Message::BuildPlanMsg(module_name, cmd, filenames) => { + plan.update(&module_name, &cmd, &filenames)?; + } + Message::Stdout(out) => { + self.progress.clear(); + println!("{}", out); + } + Message::Stderr(err) => { + let mut shell = cx.bcx.config.shell(); + shell.print_ansi(err.as_bytes())?; + shell.err().write_all(b"\n")?; + } + Message::FixDiagnostic(msg) => { + print.print(&msg)?; + } + Message::Finish(id, artifact, result) => { + let unit = match artifact { + // If `id` has completely finished we remove it + // from the `active` map ... + Artifact::All => { + info!("end: {:?}", id); + finished += 1; + self.active.remove(&id).unwrap() + } + // ... otherwise if it hasn't finished we leave it + // in there as we'll get another `Finish` later on. + Artifact::Metadata => { + info!("end (meta): {:?}", id); + self.active[&id] + } + }; + info!("end ({:?}): {:?}", unit, result); + match result { + Ok(()) => self.finish(&unit, artifact, cx)?, + Err(e) => { + let msg = "The following warnings were emitted during compilation:"; + self.emit_warnings(Some(msg), &unit, cx)?; + + if !self.active.is_empty() { + error = Some(failure::format_err!("build failed")); + handle_error(&e, &mut *cx.bcx.config.shell()); + cx.bcx.config.shell().warn( + "build failed, waiting for other \ + jobs to finish...", + )?; + } else { + error = Some(e); + } + } + } + } + Message::Token(acquired_token) => { + tokens.push( + acquired_token.chain_err(|| "failed to acquire jobserver token")?, + ); + } + } + } + } + self.progress.clear(); + + let build_type = if self.is_release { "release" } else { "dev" }; + // NOTE: this may be a bit inaccurate, since this may not display the + // profile for what was actually built. 
Profile overrides can change
+        // these settings, and in some cases different targets are built with
+        // different profiles. To be accurate, it would need to collect a
+        // list of Units built, and maybe display a list of the different
+        // profiles used. However, to keep it simple and compatible with old
+        // behavior, we just display what the base profile is.
+        let profile = cx.bcx.profiles.base_profile(self.is_release);
+        let mut opt_type = String::from(if profile.opt_level.as_str() == "0" {
+            "unoptimized"
+        } else {
+            "optimized"
+        });
+        if profile.debuginfo.unwrap_or(0) != 0 {
+            opt_type += " + debuginfo";
+        }
+
+        let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed());
+
+        if let Some(e) = error {
+            Err(e)
+        } else if self.queue.is_empty() && queue.is_empty() {
+            let message = format!(
+                "{} [{}] target(s) in {}",
+                build_type, opt_type, time_elapsed
+            );
+            if !cx.bcx.build_config.build_plan {
+                cx.bcx.config.shell().status("Finished", message)?;
+            }
+            Ok(())
+        } else {
+            debug!("queue: {:#?}", self.queue);
+            Err(internal("finished with jobs still left in the queue"))
+        }
+    }
+
+    fn show_progress(&mut self, count: usize, total: usize) {
+        let active_names = self
+            .active
+            .values()
+            .map(|u| self.name_for_progress(u))
+            .collect::<Vec<_>>();
+        drop(
+            self.progress
+                .tick_now(count, total, &format!(": {}", active_names.join(", "))),
+        );
+    }
+
+    fn name_for_progress(&self, unit: &Unit<'_>) -> String {
+        let pkg_name = unit.pkg.name();
+        match unit.mode {
+            CompileMode::Doc { .. } => format!("{}(doc)", pkg_name),
+            CompileMode::RunCustomBuild => format!("{}(build)", pkg_name),
+            _ => {
+                let annotation = match unit.target.kind() {
+                    TargetKind::Lib(_) => return pkg_name.to_string(),
+                    TargetKind::CustomBuild => return format!("{}(build.rs)", pkg_name),
+                    TargetKind::Bin => "bin",
+                    TargetKind::Test => "test",
+                    TargetKind::Bench => "bench",
+                    TargetKind::ExampleBin | TargetKind::ExampleLib(_) => "example",
+                };
+                format!("{}({})", unit.target.name(), annotation)
+            }
+        }
+    }
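The `run` method below hinges on a single dispatch: a `Fresh` job is pure bookkeeping and runs inline on the coordinator thread, while a `Dirty` job (a real rustc invocation) is spawned onto a scoped worker thread. A stripped-down sketch of that dispatch using crossbeam's scoped threads, with an illustrative local copy of the `Freshness` enum from job.rs:

```rust
use crossbeam_utils::thread::Scope;

#[derive(Clone, Copy)]
enum Freshness { Fresh, Dirty }

fn dispatch<'s, F>(scope: &Scope<'s>, fresh: Freshness, work: F)
where
    F: FnOnce() + Send + 's,
{
    match fresh {
        // Cheap: run directly on the coordinator thread.
        Freshness::Fresh => work(),
        // Slow: offload to a scoped worker thread.
        Freshness::Dirty => {
            scope.spawn(move |_| work());
        }
    }
}
```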
+    /// Executes a job in the `scope` given, pushing the spawned thread's
+    /// handle onto `threads`.
+    fn run(
+        &mut self,
+        unit: &Unit<'a>,
+        job: Job,
+        cx: &Context<'a, '_>,
+        scope: &Scope<'a>,
+    ) -> CargoResult<()> {
+        let id = self.next_id;
+        self.next_id = id.checked_add(1).unwrap();
+
+        info!("start {}: {:?}", id, unit);
+
+        assert!(self.active.insert(id, *unit).is_none());
+        *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1;
+
+        let my_tx = self.tx.clone();
+        let fresh = job.freshness();
+        let rmeta_required = cx.rmeta_required(unit);
+        let doit = move || {
+            let state = JobState {
+                id,
+                tx: my_tx.clone(),
+                rmeta_required: Cell::new(rmeta_required),
+                _marker: marker::PhantomData,
+            };
+            let res = job.run(&state);
+
+            // If the `rmeta_required` wasn't consumed but it was set
+            // previously, then we either have:
+            //
+            // 1. The `job` didn't do anything because it was "fresh".
+            // 2. The `job` returned an error and didn't reach the point where
+            //    it called `rmeta_produced`.
+            // 3. We forgot to call `rmeta_produced` and there's a bug in Cargo.
+            //
+            // Ruling out the third, the other two are pretty common: for 2
+            // we'll just naturally abort the compilation operation, but for 1
+            // we need to make sure that the metadata is flagged as produced, so
+            // send a synthetic message here.
+            if state.rmeta_required.get() && res.is_ok() {
+                my_tx
+                    .send(Message::Finish(id, Artifact::Metadata, Ok(())))
+                    .unwrap();
+            }
+
+            my_tx.send(Message::Finish(id, Artifact::All, res)).unwrap();
+        };
+
+        if !cx.bcx.build_config.build_plan {
+            // Print out some nice progress information.
+            self.note_working_on(cx.bcx.config, unit, fresh)?;
+        }
+
+        match fresh {
+            Freshness::Fresh => doit(),
+            Freshness::Dirty => {
+                scope.spawn(move |_| doit());
+            }
+        }
+
+        Ok(())
+    }
+
+    fn emit_warnings(
+        &mut self,
+        msg: Option<&str>,
+        unit: &Unit<'a>,
+        cx: &mut Context<'_, '_>,
+    ) -> CargoResult<()> {
+        let output = cx.build_state.outputs.lock().unwrap();
+        let bcx = &mut cx.bcx;
+        if let Some(output) = output.get(&(unit.pkg.package_id(), unit.kind)) {
+            if !output.warnings.is_empty() {
+                if let Some(msg) = msg {
+                    writeln!(bcx.config.shell().err(), "{}\n", msg)?;
+                }
+
+                for warning in output.warnings.iter() {
+                    bcx.config.shell().warn(warning)?;
+                }
+
+                if msg.is_some() {
+                    // Output an empty line.
+                    writeln!(bcx.config.shell().err())?;
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    fn finish(
+        &mut self,
+        unit: &Unit<'a>,
+        artifact: Artifact,
+        cx: &mut Context<'_, '_>,
+    ) -> CargoResult<()> {
+        if unit.mode.is_run_custom_build() && cx.bcx.show_warnings(unit.pkg.package_id()) {
+            self.emit_warnings(None, unit, cx)?;
+        }
+        self.queue.finish(unit, &artifact);
+        Ok(())
+    }
+
+    // This isn't super trivial because we don't want to print loads and
+    // loads of information to the console, but we also want to produce a
+    // faithful representation of what's happening. This is somewhat nuanced
+    // as a package can start compiling *very* early on because of custom
+    // build commands and such.
+    //
+    // In general, we try to print "Compiling" for the first nontrivial task
+    // run for a package, regardless of when that is. We then don't print
+    // out any more information for a package after we've printed it once.
+    fn note_working_on(
+        &mut self,
+        config: &Config,
+        unit: &Unit<'a>,
+        fresh: Freshness,
+    ) -> CargoResult<()> {
+        if (self.compiled.contains(&unit.pkg.package_id()) && !unit.mode.is_doc())
+            || (self.documented.contains(&unit.pkg.package_id()) && unit.mode.is_doc())
+        {
+            return Ok(());
+        }
+
+        match fresh {
+            // Any dirty stage which runs at least one command gets printed as
+            // being a compiled package.
+            Dirty => {
+                if unit.mode.is_doc() {
+                    self.documented.insert(unit.pkg.package_id());
+                    config.shell().status("Documenting", unit.pkg)?;
+                } else if unit.mode.is_doc_test() {
+                    // Skip doc test.
+                } else {
+                    self.compiled.insert(unit.pkg.package_id());
+                    if unit.mode.is_check() {
+                        config.shell().status("Checking", unit.pkg)?;
+                    } else {
+                        config.shell().status("Compiling", unit.pkg)?;
+                    }
+                }
+            }
+            Fresh => {
+                // If doc tests are last, only print "Fresh" if nothing has been printed.
+                if self.counts[&unit.pkg.package_id()] == 0
+                    && !(unit.mode.is_doc_test() && self.compiled.contains(&unit.pkg.package_id()))
+                {
+                    self.compiled.insert(unit.pkg.package_id());
+                    config.shell().verbose(|c| c.status("Fresh", unit.pkg))?;
+                }
+            }
+        }
+        Ok(())
+    }
+}
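One detail of `drain_the_queue` above that is easy to miss: the coordinator always holds one implicit token, so the number of running jobs is capped at `tokens + 1`, and excess jobserver tokens are released on each turn of the loop. A toy model of that accounting:

```rust
/// How many of the `ready` jobs may start, given `active` running jobs and
/// `tokens` jobserver tokens (plus the one implicit token every process has).
fn schedulable(active: usize, tokens: usize, ready: usize) -> usize {
    (tokens + 1).saturating_sub(active).min(ready)
}

fn main() {
    // Nothing running, 3 tokens granted, 10 units ready => start 4 jobs.
    assert_eq!(schedulable(0, 3, 10), 4);
    // Saturated: wait for a `Finish` message before starting more.
    assert_eq!(schedulable(4, 3, 10), 0);
}
```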
+//! # This is the root directory for all output, the top-level package
+//! # places all of its output here.
+//! target/
+//!
+//!     # This is the root directory for all output of *dependencies*
+//!     deps/
+//!
+//!     # Root directory for all compiled examples
+//!     examples/
+//!
+//!     # This is the location at which the output of all custom build
+//!     # commands are rooted
+//!     build/
+//!
+//!         # Each package gets its own directory where its build script and
+//!         # script output are placed
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!             # Each package directory has an `out` directory where output
+//!             # is placed.
+//!             out/
+//!
+//!     # This is the location at which the output of all old custom build
+//!     # commands are rooted
+//!     native/
+//!
+//!         # Each package gets its own directory for where its output is
+//!         # placed. We can't track exactly what's getting put in here, so
+//!         # we just assume that all relevant output is in these
+//!         # directories.
+//!         $pkg1/
+//!         $pkg2/
+//!         $pkg3/
+//!
+//!     # Directory used to store incremental data for the compiler (when
+//!     # incremental is enabled).
+//!     incremental/
+//!
+//!     # Hidden directory that holds all of the fingerprint files for all
+//!     # packages
+//!     .fingerprint/
+//! ```
+
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+
+use crate::core::Workspace;
+use crate::util::{CargoResult, Config, FileLock, Filesystem};
+
+/// Contains the paths of all target output locations.
+///
+/// See module docs for more information.
+pub struct Layout {
+    root: PathBuf,
+    deps: PathBuf,
+    native: PathBuf,
+    build: PathBuf,
+    incremental: PathBuf,
+    fingerprint: PathBuf,
+    examples: PathBuf,
+    /// The lock file for a build; unlocked when this struct is `drop`ped.
+    _lock: FileLock,
+}
+
+pub fn is_bad_artifact_name(name: &str) -> bool {
+    ["deps", "examples", "build", "native", "incremental"]
+        .iter()
+        .any(|&reserved| reserved == name)
+}
+
+impl Layout {
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    ///
+    /// Differs from `at` in that this calculates the root path from the workspace target directory,
+    /// adding the target triple and the profile (debug, release, ...).
+    pub fn new(ws: &Workspace<'_>, triple: Option<&str>, dest: &str) -> CargoResult<Layout> {
+        let mut path = ws.target_dir();
+        // Flexible target specifications often point at json files, so interpret
+        // the target triple as a Path and then just use the file stem as the
+        // component for the directory name in that case.
+        if let Some(triple) = triple {
+            let triple = Path::new(triple);
+            if triple.extension().and_then(|s| s.to_str()) == Some("json") {
+                path.push(
+                    triple
+                        .file_stem()
+                        .ok_or_else(|| failure::format_err!("invalid target"))?,
+                );
+            } else {
+                path.push(triple);
+            }
+        }
+        path.push(dest);
+        Layout::at(ws.config(), path)
+    }
+
+    /// Calculate the paths for build output, lock the build directory, and return as a Layout.
+    ///
+    /// This function will block if the directory is already locked.
+    pub fn at(config: &Config, root: Filesystem) -> CargoResult<Layout> {
+        // For now we don't do any finer-grained locking on the artifact
+        // directory, so just lock the entire thing for the duration of this
+        // compile.
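+        //
+        // Concretely: a second `cargo build` in the same workspace blocks on
+        // this `.cargo-lock` file (e.g. `target/debug/.cargo-lock` for a
+        // default dev build) until the first build drops its `FileLock`.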
+ let lock = root.open_rw(".cargo-lock", config, "build directory")?; + let root = root.into_path_unlocked(); + + Ok(Layout { + deps: root.join("deps"), + native: root.join("native"), + build: root.join("build"), + incremental: root.join("incremental"), + fingerprint: root.join(".fingerprint"), + examples: root.join("examples"), + root, + _lock: lock, + }) + } + + #[cfg(not(target_os = "macos"))] + fn exclude_from_backups(&self, _: &Path) {} + + #[cfg(target_os = "macos")] + /// Marks files or directories as excluded from Time Machine on macOS + /// + /// This is recommended to prevent derived/temporary files from bloating backups. + fn exclude_from_backups(&self, path: &Path) { + use core_foundation::base::TCFType; + use core_foundation::{number, string, url}; + use std::ptr; + + // For compatibility with 10.7 a string is used instead of global kCFURLIsExcludedFromBackupKey + let is_excluded_key: Result = "NSURLIsExcludedFromBackupKey".parse(); + let path = url::CFURL::from_path(path, false); + if let (Some(path), Ok(is_excluded_key)) = (path, is_excluded_key) { + unsafe { + url::CFURLSetResourcePropertyForKey( + path.as_concrete_TypeRef(), + is_excluded_key.as_concrete_TypeRef(), + number::kCFBooleanTrue as *const _, + ptr::null_mut(), + ); + } + } + // Errors are ignored, since it's an optional feature and failure + // doesn't prevent Cargo from working + } + + /// Makes sure all directories stored in the Layout exist on the filesystem. + pub fn prepare(&mut self) -> io::Result<()> { + if fs::metadata(&self.root).is_err() { + fs::create_dir_all(&self.root)?; + self.exclude_from_backups(&self.root); + } + + mkdir(&self.deps)?; + mkdir(&self.native)?; + mkdir(&self.incremental)?; + mkdir(&self.fingerprint)?; + mkdir(&self.examples)?; + mkdir(&self.build)?; + + return Ok(()); + + fn mkdir(dir: &Path) -> io::Result<()> { + if fs::metadata(&dir).is_err() { + fs::create_dir(dir)?; + } + Ok(()) + } + } + + /// Fetch the root path. + pub fn dest(&self) -> &Path { + &self.root + } + /// Fetch the deps path. + pub fn deps(&self) -> &Path { + &self.deps + } + /// Fetch the examples path. + pub fn examples(&self) -> &Path { + &self.examples + } + /// Fetch the root path. + pub fn root(&self) -> &Path { + &self.root + } + /// Fetch the incremental path. + pub fn incremental(&self) -> &Path { + &self.incremental + } + /// Fetch the fingerprint path. + pub fn fingerprint(&self) -> &Path { + &self.fingerprint + } + /// Fetch the build path. 
+ pub fn build(&self) -> &Path { + &self.build + } +} diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs new file mode 100644 index 00000000000..a0a933918a0 --- /dev/null +++ b/src/cargo/core/compiler/mod.rs @@ -0,0 +1,1343 @@ +mod build_config; +mod build_context; +mod build_plan; +mod compilation; +mod context; +mod custom_build; +mod fingerprint; +mod job; +mod job_queue; +mod layout; +mod output_depinfo; +mod unit; + +use std::env; +use std::ffi::{OsStr, OsString}; +use std::fs::{self, File}; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use failure::{bail, Error}; +use lazycell::LazyCell; +use log::debug; +use same_file::is_same_file; +use serde::Serialize; + +pub use self::build_config::{BuildConfig, CompileMode, MessageFormat}; +pub use self::build_context::{BuildContext, FileFlavor, TargetConfig, TargetInfo}; +use self::build_plan::BuildPlan; +pub use self::compilation::{Compilation, Doctest}; +pub use self::context::Context; +pub use self::custom_build::{BuildMap, BuildOutput, BuildScripts}; +pub use self::job::Freshness; +use self::job::{Job, Work}; +use self::job_queue::{JobQueue, JobState}; +pub use self::layout::is_bad_artifact_name; +use self::output_depinfo::output_depinfo; +pub use crate::core::compiler::unit::{Unit, UnitInterner}; +use crate::core::manifest::TargetSourcePath; +use crate::core::profiles::{Lto, PanicStrategy, Profile}; +use crate::core::Feature; +use crate::core::{PackageId, Target}; +use crate::util::errors::{CargoResult, CargoResultExt, Internal, ProcessError}; +use crate::util::machine_message::Message; +use crate::util::paths; +use crate::util::{self, machine_message, ProcessBuilder}; +use crate::util::{internal, join_paths, profile}; + +/// Indicates whether an object is for the host architcture or the target architecture. +/// +/// These will be the same unless cross-compiling. +#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord, Serialize)] +pub enum Kind { + Host, + Target, +} + +/// A glorified callback for executing calls to rustc. Rather than calling rustc +/// directly, we'll use an `Executor`, giving clients an opportunity to intercept +/// the build calls. +pub trait Executor: Send + Sync + 'static { + /// Called after a rustc process invocation is prepared up-front for a given + /// unit of work (may still be modified for runtime-known dependencies, when + /// the work is actually executed). + fn init<'a, 'cfg>(&self, _cx: &Context<'a, 'cfg>, _unit: &Unit<'a>) {} + + /// In case of an `Err`, Cargo will not continue with the build process for + /// this package. + fn exec( + &self, + cmd: ProcessBuilder, + id: PackageId, + target: &Target, + mode: CompileMode, + on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, + on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, + ) -> CargoResult<()>; + + /// Queried when queuing each unit of work. If it returns true, then the + /// unit will always be rebuilt, independent of whether it needs to be. + fn force_rebuild(&self, _unit: &Unit<'_>) -> bool { + false + } +} + +/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's +/// default behaviour. 
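+///
+/// For contrast, a sketch of what the `Executor` hook allows (illustrative
+/// only, not part of Cargo): an executor that logs every rustc invocation
+/// before running it unchanged.
+///
+/// ```ignore
+/// struct LoggingExecutor;
+///
+/// impl Executor for LoggingExecutor {
+///     fn exec(
+///         &self,
+///         cmd: ProcessBuilder,
+///         _id: PackageId,
+///         _target: &Target,
+///         _mode: CompileMode,
+///         on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>,
+///         on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>,
+///     ) -> CargoResult<()> {
+///         // Log the exact invocation, then delegate to the normal path.
+///         eprintln!("rustc invocation: {}", cmd);
+///         cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false)
+///             .map(drop)
+///     }
+/// }
+/// ```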
+#[derive(Copy, Clone)] +pub struct DefaultExecutor; + +impl Executor for DefaultExecutor { + fn exec( + &self, + cmd: ProcessBuilder, + _id: PackageId, + _target: &Target, + _mode: CompileMode, + on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, + on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, + ) -> CargoResult<()> { + cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false) + .map(drop) + } +} + +fn compile<'a, 'cfg: 'a>( + cx: &mut Context<'a, 'cfg>, + jobs: &mut JobQueue<'a, 'cfg>, + plan: &mut BuildPlan, + unit: &Unit<'a>, + exec: &Arc, + force_rebuild: bool, +) -> CargoResult<()> { + let bcx = cx.bcx; + let build_plan = bcx.build_config.build_plan; + if !cx.compiled.insert(*unit) { + return Ok(()); + } + + // Build up the work to be done to compile this unit, enqueuing it once + // we've got everything constructed. + let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); + fingerprint::prepare_init(cx, unit)?; + cx.links.validate(bcx.resolve, unit)?; + + let job = if unit.mode.is_run_custom_build() { + custom_build::prepare(cx, unit)? + } else if unit.mode.is_doc_test() { + // We run these targets later, so this is just a no-op for now. + Job::new(Work::noop(), Freshness::Fresh) + } else if build_plan { + Job::new(rustc(cx, unit, &exec.clone())?, Freshness::Dirty) + } else { + let force = exec.force_rebuild(unit) || force_rebuild; + let mut job = fingerprint::prepare_target(cx, unit, force)?; + job.before(if job.freshness() == Freshness::Dirty { + let work = if unit.mode.is_doc() { + rustdoc(cx, unit)? + } else { + rustc(cx, unit, exec)? + }; + work.then(link_targets(cx, unit, false)?) + } else { + let work = if cx.bcx.build_config.cache_messages() + && cx.bcx.show_warnings(unit.pkg.package_id()) + { + replay_output_cache( + unit.pkg.package_id(), + unit.target, + cx.files().message_cache_path(unit), + cx.bcx.build_config.message_format, + cx.bcx.config.shell().supports_color(), + ) + } else { + Work::noop() + }; + // Need to link targets on both the dirty and fresh. + work.then(link_targets(cx, unit, true)?) + }); + + job + }; + jobs.enqueue(cx, unit, job)?; + drop(p); + + // Be sure to compile all dependencies of this target as well. + for unit in cx.dep_targets(unit).iter() { + compile(cx, jobs, plan, unit, exec, false)?; + } + if build_plan { + plan.add(cx, unit)?; + } + + Ok(()) +} + +fn rustc<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, + exec: &Arc, +) -> CargoResult { + let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?; + let build_plan = cx.bcx.build_config.build_plan; + + let name = unit.pkg.name().to_string(); + let buildkey = unit.buildkey(); + + add_cap_lints(cx.bcx, unit, &mut rustc); + + let outputs = cx.outputs(unit)?; + let root = cx.files().out_dir(unit); + let kind = unit.kind; + + // Prepare the native lib state (extra `-L` and `-l` flags). + let build_state = cx.build_state.clone(); + let current_id = unit.pkg.package_id(); + let build_deps = load_build_deps(cx, unit); + + // If we are a binary and the package also contains a library, then we + // don't pass the `-l` flags. 
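+    // (The library target receives the `-l` flags instead, so native
+    // libraries are linked once by the library rather than once per binary.)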
+ let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); + let pass_cdylib_link_args = unit.target.is_cdylib(); + let do_rename = unit.target.allows_underscores() && !unit.mode.is_any_test(); + let real_name = unit.target.name().to_string(); + let crate_name = unit.target.crate_name(); + + // Rely on `target_filenames` iterator as source of truth rather than rederiving filestem. + let rustc_dep_info_loc = if do_rename && cx.files().metadata(unit).is_none() { + root.join(&crate_name) + } else { + root.join(&cx.files().file_stem(unit)) + } + .with_extension("d"); + let dep_info_loc = fingerprint::dep_info_loc(cx, unit); + + rustc.args(cx.bcx.rustflags_args(unit)); + if cx.bcx.config.cli_unstable().binary_dep_depinfo { + rustc.arg("-Zbinary-dep-depinfo"); + } + let mut output_options = OutputOptions::new(cx, unit); + let package_id = unit.pkg.package_id(); + let target = unit.target.clone(); + let mode = unit.mode; + + exec.init(cx, unit); + let exec = exec.clone(); + + let root_output = cx.files().host_root().to_path_buf(); + let target_dir = cx.bcx.ws.target_dir().into_path_unlocked(); + let pkg_root = unit.pkg.root().to_path_buf(); + let cwd = rustc + .get_cwd() + .unwrap_or_else(|| cx.bcx.config.cwd()) + .to_path_buf(); + let fingerprint_dir = cx.files().fingerprint_dir(unit); + + return Ok(Work::new(move |state| { + // Only at runtime have we discovered what the extra -L and -l + // arguments are for native libraries, so we process those here. We + // also need to be sure to add any -L paths for our plugins to the + // dynamic library load path as a plugin's dynamic library may be + // located somewhere in there. + // Finally, if custom environment variables have been produced by + // previous build scripts, we include them in the rustc invocation. + if let Some(build_deps) = build_deps { + let build_state = build_state.outputs.lock().unwrap(); + if !build_plan { + add_native_deps( + &mut rustc, + &build_state, + &build_deps, + pass_l_flag, + pass_cdylib_link_args, + current_id, + )?; + add_plugin_deps(&mut rustc, &build_state, &build_deps, &root_output)?; + } + add_custom_env(&mut rustc, &build_state, current_id, kind)?; + } + + for output in outputs.iter() { + // If there is both an rmeta and rlib, rustc will prefer to use the + // rlib, even if it is older. Therefore, we must delete the rlib to + // force using the new rmeta. + if output.path.extension() == Some(OsStr::new("rmeta")) { + let dst = root.join(&output.path).with_extension("rlib"); + if dst.exists() { + paths::remove_file(&dst)?; + } + } + } + + fn internal_if_simple_exit_code(err: Error) -> Error { + // If a signal on unix (`code == None`) or an abnormal termination + // on Windows (codes like `0xC0000409`), don't hide the error details. 
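+        // For example, a rustc segfault surfaces on unix as `code == None`;
+        // that detail should stay visible to the user.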
+ match err + .downcast_ref::() + .as_ref() + .and_then(|perr| perr.exit.and_then(|e| e.code())) + { + Some(n) if n < 128 => Internal::new(err).into(), + _ => err, + } + } + + state.running(&rustc); + let timestamp = paths::set_invocation_time(&fingerprint_dir)?; + if build_plan { + state.build_plan(buildkey, rustc.clone(), outputs.clone()); + } else { + exec.exec( + rustc, + package_id, + &target, + mode, + &mut |line| on_stdout_line(state, line, package_id, &target), + &mut |line| on_stderr_line(state, line, package_id, &target, &mut output_options), + ) + .map_err(internal_if_simple_exit_code) + .chain_err(|| format!("Could not compile `{}`.", name))?; + } + + if do_rename && real_name != crate_name { + let dst = &outputs[0].path; + let src = dst.with_file_name( + dst.file_name() + .unwrap() + .to_str() + .unwrap() + .replace(&real_name, &crate_name), + ); + if src.exists() && src.file_name() != dst.file_name() { + fs::rename(&src, &dst) + .chain_err(|| internal(format!("could not rename crate {:?}", src)))?; + } + } + + if rustc_dep_info_loc.exists() { + fingerprint::translate_dep_info( + &rustc_dep_info_loc, + &dep_info_loc, + &cwd, + &pkg_root, + &target_dir, + // Do not track source files in the fingerprint for registry dependencies. + current_id.source_id().is_path(), + ) + .chain_err(|| { + internal(format!( + "could not parse/generate dep info at: {}", + rustc_dep_info_loc.display() + )) + })?; + filetime::set_file_times(dep_info_loc, timestamp, timestamp)?; + } + + Ok(()) + })); + + // Add all relevant `-L` and `-l` flags from dependencies (now calculated and + // present in `state`) to the command provided. + fn add_native_deps( + rustc: &mut ProcessBuilder, + build_state: &BuildMap, + build_scripts: &BuildScripts, + pass_l_flag: bool, + pass_cdylib_link_args: bool, + current_id: PackageId, + ) -> CargoResult<()> { + for key in build_scripts.to_link.iter() { + let output = build_state.get(key).ok_or_else(|| { + internal(format!( + "couldn't find build state for {}/{:?}", + key.0, key.1 + )) + })?; + for path in output.library_paths.iter() { + rustc.arg("-L").arg(path); + } + if key.0 == current_id { + for cfg in &output.cfgs { + rustc.arg("--cfg").arg(cfg); + } + if pass_l_flag { + for name in output.library_links.iter() { + rustc.arg("-l").arg(name); + } + } + if pass_cdylib_link_args { + for arg in output.linker_args.iter() { + let link_arg = format!("link-arg={}", arg); + rustc.arg("-C").arg(link_arg); + } + } + } + } + Ok(()) + } + + // Add all custom environment variables present in `state` (after they've + // been put there by one of the `build_scripts`) to the command provided. + fn add_custom_env( + rustc: &mut ProcessBuilder, + build_state: &BuildMap, + current_id: PackageId, + kind: Kind, + ) -> CargoResult<()> { + let key = (current_id, kind); + if let Some(output) = build_state.get(&key) { + for &(ref name, ref value) in output.env.iter() { + rustc.env(name, value); + } + } + Ok(()) + } +} + +/// Link the compiled target (often of form `foo-{metadata_hash}`) to the +/// final target. This must happen during both "Fresh" and "Compile". 
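+/// (For example, hard-linking `deps/foo-0f81dae10221cdfa` out to
+/// `target/debug/foo`; the hash here is illustrative.)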
+fn link_targets<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, + fresh: bool, +) -> CargoResult { + let bcx = cx.bcx; + let outputs = cx.outputs(unit)?; + let export_dir = cx.files().export_dir(); + let package_id = unit.pkg.package_id(); + let profile = unit.profile; + let unit_mode = unit.mode; + let features = bcx + .resolve + .features_sorted(package_id) + .into_iter() + .map(|s| s.to_owned()) + .collect(); + let json_messages = bcx.build_config.emit_json(); + let executable = cx.get_executable(unit)?; + let mut target = unit.target.clone(); + if let TargetSourcePath::Metabuild = target.src_path() { + // Give it something to serialize. + let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); + target.set_src_path(TargetSourcePath::Path(path)); + } + + Ok(Work::new(move |state| { + // If we're a "root crate", e.g., the target of this compilation, then we + // hard link our outputs out of the `deps` directory into the directory + // above. This means that `cargo build` will produce binaries in + // `target/debug` which one probably expects. + let mut destinations = vec![]; + for output in outputs.iter() { + let src = &output.path; + // This may have been a `cargo rustc` command which changes the + // output, so the source may not actually exist. + if !src.exists() { + continue; + } + let dst = match output.hardlink.as_ref() { + Some(dst) => dst, + None => { + destinations.push(src.clone()); + continue; + } + }; + destinations.push(dst.clone()); + hardlink_or_copy(src, dst)?; + if let Some(ref path) = output.export_path { + let export_dir = export_dir.as_ref().unwrap(); + if !export_dir.exists() { + fs::create_dir_all(export_dir)?; + } + + hardlink_or_copy(src, path)?; + } + } + + if json_messages { + let art_profile = machine_message::ArtifactProfile { + opt_level: profile.opt_level.as_str(), + debuginfo: profile.debuginfo, + debug_assertions: profile.debug_assertions, + overflow_checks: profile.overflow_checks, + test: unit_mode.is_any_test(), + }; + + let msg = machine_message::Artifact { + package_id, + target: &target, + profile: art_profile, + features, + filenames: destinations, + executable, + fresh, + } + .to_json_string(); + state.stdout(msg); + } + Ok(()) + })) +} + +fn hardlink_or_copy(src: &Path, dst: &Path) -> CargoResult<()> { + debug!("linking {} to {}", src.display(), dst.display()); + if is_same_file(src, dst).unwrap_or(false) { + return Ok(()); + } + if dst.exists() { + paths::remove_file(&dst)?; + } + + let link_result = if src.is_dir() { + #[cfg(target_os = "redox")] + use std::os::redox::fs::symlink; + #[cfg(unix)] + use std::os::unix::fs::symlink; + #[cfg(windows)] + use std::os::windows::fs::symlink_dir as symlink; + + let dst_dir = dst.parent().unwrap(); + let src = if src.starts_with(dst_dir) { + src.strip_prefix(dst_dir).unwrap() + } else { + src + }; + symlink(src, dst) + } else { + fs::hard_link(src, dst) + }; + link_result + .or_else(|err| { + debug!("link failed {}. falling back to fs::copy", err); + fs::copy(src, dst).map(|_| ()) + }) + .chain_err(|| { + format!( + "failed to link or copy `{}` to `{}`", + src.display(), + dst.display() + ) + })?; + Ok(()) +} + +fn load_build_deps(cx: &Context<'_, '_>, unit: &Unit<'_>) -> Option> { + cx.build_scripts.get(unit).cloned() +} + +// For all plugin dependencies, add their -L paths (now calculated and +// present in `state`) to the dynamic library load path for the command to +// execute. 
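+// In practice, the environment variable returned by
+// `util::dylib_path_envvar()` is `LD_LIBRARY_PATH` on most unixes,
+// `DYLD_FALLBACK_LIBRARY_PATH` on macOS, and `PATH` on Windows.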
+fn add_plugin_deps(
+    rustc: &mut ProcessBuilder,
+    build_state: &BuildMap,
+    build_scripts: &BuildScripts,
+    root_output: &PathBuf,
+) -> CargoResult<()> {
+    let var = util::dylib_path_envvar();
+    let search_path = rustc.get_env(var).unwrap_or_default();
+    let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
+    for &id in build_scripts.plugins.iter() {
+        let output = build_state
+            .get(&(id, Kind::Host))
+            .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", id)))?;
+        search_path.append(&mut filter_dynamic_search_path(
+            output.library_paths.iter(),
+            root_output,
+        ));
+    }
+    let search_path = join_paths(&search_path, var)?;
+    rustc.env(var, &search_path);
+    Ok(())
+}
+
+// Determine paths to add to the dynamic search path from -L entries
+//
+// Strip off prefixes like "native=" or "framework=" and filter out directories
+// **not** inside our output directory since they are likely spurious and can cause
+// clashes with system shared libraries (issue #3366).
+fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &PathBuf) -> Vec<PathBuf>
+where
+    I: Iterator<Item = &'a PathBuf>,
+{
+    let mut search_path = vec![];
+    for dir in paths {
+        let dir = match dir.to_str() {
+            Some(s) => {
+                let mut parts = s.splitn(2, '=');
+                match (parts.next(), parts.next()) {
+                    (Some("native"), Some(path))
+                    | (Some("crate"), Some(path))
+                    | (Some("dependency"), Some(path))
+                    | (Some("framework"), Some(path))
+                    | (Some("all"), Some(path)) => path.into(),
+                    _ => dir.clone(),
+                }
+            }
+            None => dir.clone(),
+        };
+        if dir.starts_with(&root_output) {
+            search_path.push(dir);
+        } else {
+            debug!(
+                "Not including path {} in runtime library search path because it is \
+                 outside target root {}",
+                dir.display(),
+                root_output.display()
+            );
+        }
+    }
+    search_path
+}
+
+fn prepare_rustc<'a, 'cfg>(
+    cx: &mut Context<'a, 'cfg>,
+    crate_types: &[&str],
+    unit: &Unit<'a>,
+) -> CargoResult<ProcessBuilder> {
+    let is_primary = cx.is_primary_package(unit);
+
+    let mut base = cx
+        .compilation
+        .rustc_process(unit.pkg, unit.target, is_primary)?;
+    base.inherit_jobserver(&cx.jobserver);
+    build_base_args(cx, &mut base, unit, crate_types)?;
+    build_deps_args(&mut base, cx, unit)?;
+    Ok(base)
+}
+
+fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Work> {
+    let bcx = cx.bcx;
+    let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg, unit.target)?;
+    rustdoc.inherit_jobserver(&cx.jobserver);
+    rustdoc.arg("--crate-name").arg(&unit.target.crate_name());
+    add_path_args(bcx, unit, &mut rustdoc);
+    add_cap_lints(bcx, unit, &mut rustdoc);
+    add_color(bcx, &mut rustdoc);
+
+    if unit.kind != Kind::Host {
+        if let Some(ref target) = bcx.build_config.requested_target {
+            rustdoc.arg("--target").arg(target);
+        }
+    }
+
+    let doc_dir = cx.files().out_dir(unit);
+
+    // Create the documentation directory ahead of time as rustdoc currently has
+    // a bug where concurrent invocations will race to create this directory if
+    // it doesn't already exist.
+    fs::create_dir_all(&doc_dir)?;
+
+    rustdoc.arg("-o").arg(doc_dir);
+
+    // Need to keep a correct order on the features, so get the sorted name first,
+    // then resolve the specified platform.
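+    // Each active feature then reaches rustdoc as `--cfg feature="<name>"`,
+    // matching what rustc is handed during a regular build.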
+ for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { + if let Some(platform) = bcx.resolve.features(unit.pkg.package_id()).get(feat) { + if bcx.platform_activated(platform.as_ref(), unit.kind) { + rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + } else { + bail!("Failed to get the target for the feature `{}`", feat); + } + } + + add_error_format(cx, &mut rustdoc, false, false)?; + + if let Some(args) = bcx.extra_args_for(unit) { + rustdoc.args(args); + } + + build_deps_args(&mut rustdoc, cx, unit)?; + + rustdoc.args(bcx.rustdocflags_args(unit)); + + let name = unit.pkg.name().to_string(); + let build_state = cx.build_state.clone(); + let key = (unit.pkg.package_id(), unit.kind); + let package_id = unit.pkg.package_id(); + let target = unit.target.clone(); + let mut output_options = OutputOptions::new(cx, unit); + + Ok(Work::new(move |state| { + if let Some(output) = build_state.outputs.lock().unwrap().get(&key) { + for cfg in output.cfgs.iter() { + rustdoc.arg("--cfg").arg(cfg); + } + for &(ref name, ref value) in output.env.iter() { + rustdoc.env(name, value); + } + } + state.running(&rustdoc); + + rustdoc + .exec_with_streaming( + &mut |line| on_stdout_line(state, line, package_id, &target), + &mut |line| on_stderr_line(state, line, package_id, &target, &mut output_options), + false, + ) + .chain_err(|| format!("Could not document `{}`.", name))?; + Ok(()) + })) +} + +// The path that we pass to rustc is actually fairly important because it will +// show up in error messages (important for readability), debug information +// (important for caching), etc. As a result we need to be pretty careful how we +// actually invoke rustc. +// +// In general users don't expect `cargo build` to cause rebuilds if you change +// directories. That could be if you just change directories in the package or +// if you literally move the whole package wholesale to a new directory. As a +// result we mostly don't factor in `cwd` to this calculation. Instead we try to +// track the workspace as much as possible and we update the current directory +// of rustc/rustdoc where appropriate. +// +// The first returned value here is the argument to pass to rustc, and the +// second is the cwd that rustc should operate in. +fn path_args(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>) -> (PathBuf, PathBuf) { + let ws_root = bcx.ws.root(); + let src = match unit.target.src_path() { + TargetSourcePath::Path(path) => path.to_path_buf(), + TargetSourcePath::Metabuild => unit.pkg.manifest().metabuild_path(bcx.ws.target_dir()), + }; + assert!(src.is_absolute()); + if unit.pkg.package_id().source_id().is_path() { + if let Ok(path) = src.strip_prefix(ws_root) { + return (path.to_path_buf(), ws_root.to_path_buf()); + } + } + (src, unit.pkg.root().to_path_buf()) +} + +fn add_path_args(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessBuilder) { + let (arg, cwd) = path_args(bcx, unit); + cmd.arg(arg); + cmd.cwd(cwd); +} + +fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit<'_>, cmd: &mut ProcessBuilder) { + // If this is an upstream dep we don't want warnings from, turn off all + // lints. + if !bcx.show_warnings(unit.pkg.package_id()) { + cmd.arg("--cap-lints").arg("allow"); + + // If this is an upstream dep but we *do* want warnings, make sure that they + // don't fail compilation. 
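+    // (`--cap-lints warn` demotes any deny- or forbid-level lints inside the
+    // dependency to plain warnings.)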
+ } else if !unit.pkg.package_id().source_id().is_path() { + cmd.arg("--cap-lints").arg("warn"); + } +} + +fn add_color(bcx: &BuildContext<'_, '_>, cmd: &mut ProcessBuilder) { + let shell = bcx.config.shell(); + let color = if shell.supports_color() { + "always" + } else { + "never" + }; + cmd.args(&["--color", color]); +} + +/// Add error-format flags to the command. +/// +/// This is rather convoluted right now. The general overview is: +/// - If -Zcache-messages or `build.pipelining` is enabled, Cargo always uses +/// JSON output. This has several benefits, such as being easier to parse, +/// handles changing formats (for replaying cached messages), ensures +/// atomic output (so messages aren't interleaved), etc. +/// - `supports_termcolor` is a temporary flag. rustdoc does not yet support +/// the `--json-rendered` flag, but it is intended to fix that soon. +/// - `short` output is not yet supported for JSON output. We haven't yet +/// decided how this problem will be resolved. Probably either adding +/// "short" to the JSON output, or more ambitiously moving diagnostic +/// rendering to an external library that Cargo can share with rustc. +/// +/// It is intended in the future that Cargo *always* uses the JSON output, and +/// this function can be simplified. The above issues need to be resolved, the +/// flags need to be stabilized, and we need more testing to ensure there +/// aren't any regressions. +fn add_error_format( + cx: &Context<'_, '_>, + cmd: &mut ProcessBuilder, + pipelined: bool, + supports_termcolor: bool, +) -> CargoResult<()> { + // If this unit is producing a required rmeta file then we need to know + // when the rmeta file is ready so we can signal to the rest of Cargo that + // it can continue dependent compilations. To do this we are currently + // required to switch the compiler into JSON message mode, but we still + // want to present human readable errors as well. (this rabbit hole just + // goes and goes) + // + // All that means is that if we're not already in JSON mode we need to + // switch to JSON mode, ensure that rustc error messages can be rendered + // prettily, and then when parsing JSON messages from rustc we need to + // internally understand that we should extract the `rendered` field and + // present it if we can. + if cx.bcx.build_config.cache_messages() || pipelined { + cmd.arg("--error-format=json").arg("-Zunstable-options"); + if supports_termcolor { + cmd.arg("--json-rendered=termcolor"); + } + if cx.bcx.build_config.message_format == MessageFormat::Short { + // FIXME(rust-lang/rust#60419): right now we have no way of + // turning on JSON messages from the compiler and also asking + // the rendered field to be in the `short` format. 
+ bail!( + "currently `--message-format short` is incompatible with {}", + if pipelined { + "pipelined compilation" + } else { + "cached output" + } + ); + } + if pipelined { + cmd.arg("-Zemit-artifact-notifications"); + } + } else { + match cx.bcx.build_config.message_format { + MessageFormat::Human => (), + MessageFormat::Json => { + cmd.arg("--error-format").arg("json"); + } + MessageFormat::Short => { + cmd.arg("--error-format").arg("short"); + } + } + } + Ok(()) +} + +fn build_base_args<'a, 'cfg>( + cx: &mut Context<'a, 'cfg>, + cmd: &mut ProcessBuilder, + unit: &Unit<'a>, + crate_types: &[&str], +) -> CargoResult<()> { + assert!(!unit.mode.is_run_custom_build()); + + let bcx = cx.bcx; + let Profile { + ref opt_level, + ref lto, + codegen_units, + debuginfo, + debug_assertions, + overflow_checks, + rpath, + ref panic, + incremental, + .. + } = unit.profile; + let test = unit.mode.is_any_test(); + + cmd.arg("--crate-name").arg(&unit.target.crate_name()); + + add_path_args(bcx, unit, cmd); + add_color(bcx, cmd); + add_error_format(cx, cmd, cx.rmeta_required(unit), true)?; + + if !test { + for crate_type in crate_types.iter() { + cmd.arg("--crate-type").arg(crate_type); + } + } + + if unit.mode.is_check() { + cmd.arg("--emit=dep-info,metadata"); + } else if !unit.requires_upstream_objects() { + // Always produce metdata files for rlib outputs. Metadata may be used + // in this session for a pipelined compilation, or it may be used in a + // future Cargo session as part of a pipelined compile. + cmd.arg("--emit=dep-info,metadata,link"); + } else { + cmd.arg("--emit=dep-info,link"); + } + + let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) + || (crate_types.contains(&"dylib") && bcx.ws.members().any(|p| p != unit.pkg)); + if prefer_dynamic { + cmd.arg("-C").arg("prefer-dynamic"); + } + + if opt_level.as_str() != "0" { + cmd.arg("-C").arg(&format!("opt-level={}", opt_level)); + } + + if *panic != PanicStrategy::Unwind { + cmd.arg("-C").arg(format!("panic={}", panic)); + } + + // Disable LTO for host builds as prefer_dynamic and it are mutually + // exclusive. + if unit.target.can_lto() && !unit.target.for_host() { + match *lto { + Lto::Bool(false) => {} + Lto::Bool(true) => { + cmd.args(&["-C", "lto"]); + } + Lto::Named(ref s) => { + cmd.arg("-C").arg(format!("lto={}", s)); + } + } + } + + if let Some(n) = codegen_units { + // There are some restrictions with LTO and codegen-units, so we + // only add codegen units when LTO is not used. + cmd.arg("-C").arg(&format!("codegen-units={}", n)); + } + + if let Some(debuginfo) = debuginfo { + cmd.arg("-C").arg(format!("debuginfo={}", debuginfo)); + } + + if let Some(args) = bcx.extra_args_for(unit) { + cmd.args(args); + } + + // `-C overflow-checks` is implied by the setting of `-C debug-assertions`, + // so we only need to provide `-C overflow-checks` if it differs from + // the value of `-C debug-assertions` we would provide. 
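+    // Worked example: the default release profile (opt-level 3,
+    // debug-assertions off, overflow-checks off) needs no flags at all,
+    // while release with `overflow-checks = true` needs exactly one,
+    // `-C overflow-checks=on`.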
+ if opt_level.as_str() != "0" { + if debug_assertions { + cmd.args(&["-C", "debug-assertions=on"]); + if !overflow_checks { + cmd.args(&["-C", "overflow-checks=off"]); + } + } else if overflow_checks { + cmd.args(&["-C", "overflow-checks=on"]); + } + } else if !debug_assertions { + cmd.args(&["-C", "debug-assertions=off"]); + if overflow_checks { + cmd.args(&["-C", "overflow-checks=on"]); + } + } else if !overflow_checks { + cmd.args(&["-C", "overflow-checks=off"]); + } + + if test && unit.target.harness() { + cmd.arg("--test"); + } else if test { + cmd.arg("--cfg").arg("test"); + } + + // We ideally want deterministic invocations of rustc to ensure that + // rustc-caching strategies like sccache are able to cache more, so sort the + // feature list here. + for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { + if let Some(platform) = bcx.resolve.features(unit.pkg.package_id()).get(feat) { + if bcx.platform_activated(platform.as_ref(), unit.kind) { + cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + } else { + bail!("Failed to get the target for the feature `{}`", feat); + } + } + + match cx.files().metadata(unit) { + Some(m) => { + cmd.arg("-C").arg(&format!("metadata={}", m)); + cmd.arg("-C").arg(&format!("extra-filename=-{}", m)); + } + None => { + cmd.arg("-C") + .arg(&format!("metadata={}", cx.files().target_short_hash(unit))); + } + } + + if rpath { + cmd.arg("-C").arg("rpath"); + } + + cmd.arg("--out-dir").arg(&cx.files().out_dir(unit)); + + fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) { + if let Some(val) = val { + let mut joined = OsString::from(prefix); + joined.push(val); + cmd.arg(key).arg(joined); + } + } + + if unit.kind == Kind::Target { + opt( + cmd, + "--target", + "", + bcx.build_config + .requested_target + .as_ref() + .map(|s| s.as_ref()), + ); + } + + opt(cmd, "-C", "ar=", bcx.ar(unit.kind).map(|s| s.as_ref())); + opt( + cmd, + "-C", + "linker=", + bcx.linker(unit.kind).map(|s| s.as_ref()), + ); + if incremental { + let dir = cx.files().layout(unit.kind).incremental().as_os_str(); + opt(cmd, "-C", "incremental=", Some(dir)); + } + Ok(()) +} + +fn build_deps_args<'a, 'cfg>( + cmd: &mut ProcessBuilder, + cx: &mut Context<'a, 'cfg>, + unit: &Unit<'a>, +) -> CargoResult<()> { + let bcx = cx.bcx; + cmd.arg("-L").arg(&{ + let mut deps = OsString::from("dependency="); + deps.push(cx.files().deps_dir(unit)); + deps + }); + + // Be sure that the host path is also listed. This'll ensure that proc macro + // dependencies are correctly found (for reexported macros). + if let Kind::Target = unit.kind { + cmd.arg("-L").arg(&{ + let mut deps = OsString::from("dependency="); + deps.push(cx.files().host_deps()); + deps + }); + } + + let dep_targets = cx.dep_targets(unit); + + // If there is not one linkable target but should, rustc fails later + // on if there is an `extern crate` for it. This may turn into a hard + // error in the future (see PR #4797). + if !dep_targets + .iter() + .any(|u| !u.mode.is_doc() && u.target.linkable()) + { + if let Some(u) = dep_targets + .iter() + .find(|u| !u.mode.is_doc() && u.target.is_lib()) + { + bcx.config.shell().warn(format!( + "The package `{}` \ + provides no linkable target. The compiler might raise an error while compiling \ + `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \ + Cargo.toml. 
This warning might turn into a hard error in the future.", + u.target.crate_name(), + unit.target.crate_name(), + u.target.crate_name() + ))?; + } + } + + let mut unstable_opts = false; + + for dep in dep_targets { + if dep.mode.is_run_custom_build() { + cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep)); + } + if dep.target.linkable() && !dep.mode.is_doc() { + link_to(cmd, cx, unit, &dep, &mut unstable_opts)?; + } + } + + // This will only be set if we're already using a feature + // requiring nightly rust + if unstable_opts { + cmd.arg("-Z").arg("unstable-options"); + } + + return Ok(()); + + fn link_to<'a, 'cfg>( + cmd: &mut ProcessBuilder, + cx: &mut Context<'a, 'cfg>, + current: &Unit<'a>, + dep: &Unit<'a>, + need_unstable_opts: &mut bool, + ) -> CargoResult<()> { + let bcx = cx.bcx; + + let mut value = OsString::new(); + value.push(bcx.extern_crate_name(current, dep)?); + value.push("="); + + let mut pass = |file| { + let mut value = value.clone(); + value.push(file); + + if current + .pkg + .manifest() + .features() + .require(Feature::public_dependency()) + .is_ok() + && !bcx.is_public_dependency(current, dep) + { + cmd.arg("--extern-private"); + *need_unstable_opts = true; + } else { + cmd.arg("--extern"); + } + + cmd.arg(&value); + }; + + let outputs = cx.outputs(dep)?; + let mut outputs = outputs.iter().filter_map(|output| match output.flavor { + FileFlavor::Linkable { rmeta } => Some((output, rmeta)), + _ => None, + }); + + if cx.only_requires_rmeta(current, dep) { + let (output, _rmeta) = outputs + .find(|(_output, rmeta)| *rmeta) + .expect("failed to find rlib dep for pipelined dep"); + pass(&output.path); + } else { + for (output, rmeta) in outputs { + if !rmeta { + pass(&output.path); + } + } + } + Ok(()) + } +} + +fn envify(s: &str) -> String { + s.chars() + .flat_map(|c| c.to_uppercase()) + .map(|c| if c == '-' { '_' } else { c }) + .collect() +} + +impl Kind { + fn for_target(self, target: &Target) -> Kind { + // Once we start compiling for the `Host` kind we continue doing so, but + // if we are a `Target` kind and then we start compiling for a target + // that needs to be on the host we lift ourselves up to `Host`. + match self { + Kind::Host => Kind::Host, + Kind::Target if target.for_host() => Kind::Host, + Kind::Target => Kind::Target, + } + } +} + +struct OutputOptions { + /// Get the `"rendered"` field from the JSON output and display it on + /// stderr instead of the JSON message. + extract_rendered_messages: bool, + /// Look for JSON message that indicates .rmeta file is available for + /// pipelined compilation. + look_for_metadata_directive: bool, + /// Whether or not to display messages in color. + color: bool, + /// Where to write the JSON messages to support playback later if the unit + /// is fresh. The file is created lazily so that in the normal case, lots + /// of empty files are not created. This is None if caching is disabled. + cache_cell: Option<(PathBuf, LazyCell)>, +} + +impl OutputOptions { + fn new<'a>(cx: &Context<'a, '_>, unit: &Unit<'a>) -> OutputOptions { + let extract_rendered_messages = cx.bcx.build_config.message_format != MessageFormat::Json; + let look_for_metadata_directive = cx.rmeta_required(unit); + let color = cx.bcx.config.shell().supports_color(); + let cache_cell = if cx.bcx.build_config.cache_messages() { + let path = cx.files().message_cache_path(unit); + // Remove old cache, ignore ENOENT, which is the common case. 
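+            // (`drop` discards the `io::Result`, so a missing file, the
+            // ENOENT case, is ignored rather than treated as an error.)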
+            drop(fs::remove_file(&path));
+            Some((path, LazyCell::new()))
+        } else {
+            None
+        };
+        OutputOptions {
+            extract_rendered_messages,
+            look_for_metadata_directive,
+            color,
+            cache_cell,
+        }
+    }
+}
+
+fn on_stdout_line(
+    state: &JobState<'_>,
+    line: &str,
+    _package_id: PackageId,
+    _target: &Target,
+) -> CargoResult<()> {
+    state.stdout(line.to_string());
+    Ok(())
+}
+
+fn on_stderr_line(
+    state: &JobState<'_>,
+    line: &str,
+    package_id: PackageId,
+    target: &Target,
+    options: &mut OutputOptions,
+) -> CargoResult<()> {
+    // Check if caching is enabled.
+    if let Some((path, cell)) = &mut options.cache_cell {
+        // Cache the output, which will be replayed later when Fresh.
+        let f = cell.try_borrow_mut_with(|| File::create(path))?;
+        debug_assert!(!line.contains('\n'));
+        f.write_all(line.as_bytes())?;
+        f.write_all(&[b'\n'])?;
+    }
+
+    // We primarily want to use this function to process JSON messages from
+    // rustc. The compiler should always print one JSON message per line, and
+    // otherwise it may have other output intermingled (think RUST_LOG or
+    // something like that), so skip over everything that doesn't look like a
+    // JSON message.
+    if !line.starts_with('{') {
+        state.stderr(line.to_string());
+        return Ok(());
+    }
+
+    let mut compiler_message: Box<serde_json::value::RawValue> = match serde_json::from_str(line) {
+        Ok(msg) => msg,
+
+        // If the compiler produced a line that started with `{` but it wasn't
+        // valid JSON, maybe it wasn't JSON in the first place! Forward it along
+        // to stderr.
+        Err(e) => {
+            debug!("failed to parse json: {:?}", e);
+            state.stderr(line.to_string());
+            return Ok(());
+        }
+    };
+
+    // In some modes of compilation Cargo switches the compiler to JSON mode
+    // but the user didn't request that so we still want to print pretty rustc
+    // colorized diagnostics. In those cases (`extract_rendered_messages`) we
+    // take a look at the JSON blob we got, see if it's a relevant diagnostic,
+    // and if so forward just that diagnostic for us to print.
+    if options.extract_rendered_messages {
+        #[derive(serde::Deserialize)]
+        struct CompilerMessage {
+            rendered: String,
+        }
+        if let Ok(mut error) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
+            // state.stderr will add a newline
+            if error.rendered.ends_with('\n') {
+                error.rendered.pop();
+            }
+            let rendered = if options.color {
+                error.rendered
+            } else {
+                // Strip only fails if the Writer fails, which is Cursor
+                // on a Vec, which should never fail.
+                strip_ansi_escapes::strip(&error.rendered)
+                    .map(|v| String::from_utf8(v).expect("utf8"))
+                    .expect("strip should never fail")
+            };
+            state.stderr(rendered);
+            return Ok(());
+        }
+    } else {
+        // Remove color information from the rendered string. rustc has not
+        // included color in the past, so to avoid breaking anything, strip it
+        // out when --json-rendered=termcolor is used. This runs
+        // unconditionally under the assumption that Cargo will eventually
+        // move to this as the default mode. Perhaps in the future, cargo
+        // could allow the user to enable/disable color (such as with a
+        // `--json-rendered` or `--color` or `--message-format` flag).
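+        //
+        // Roughly: `{"rendered":"\u001b[31merror\u001b[0m: ...","level":...}`
+        // becomes `{"rendered":"error: ...","level":...}`, with every other
+        // field carried through unchanged by the flattened map.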
+ #[derive(serde::Deserialize, serde::Serialize)] + struct CompilerMessage { + rendered: String, + #[serde(flatten)] + other: std::collections::BTreeMap, + } + if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { + error.rendered = strip_ansi_escapes::strip(&error.rendered) + .map(|v| String::from_utf8(v).expect("utf8")) + .unwrap_or(error.rendered); + let new_line = serde_json::to_string(&error)?; + let new_msg: Box = serde_json::from_str(&new_line)?; + compiler_message = new_msg; + } + } + + // In some modes of execution we will execute rustc with `-Z + // emit-artifact-notifications` to look for metadata files being produced. When this + // happens we may be able to start subsequent compilations more quickly than + // waiting for an entire compile to finish, possibly using more parallelism + // available to complete a compilation session more quickly. + // + // In these cases look for a matching directive and inform Cargo internally + // that a metadata file has been produced. + if options.look_for_metadata_directive { + #[derive(serde::Deserialize)] + struct ArtifactNotification { + artifact: String, + } + if let Ok(artifact) = serde_json::from_str::(compiler_message.get()) { + log::trace!("found directive from rustc: `{}`", artifact.artifact); + if artifact.artifact.ends_with(".rmeta") { + log::debug!("looks like metadata finished early!"); + state.rmeta_produced(); + } + return Ok(()); + } + } + + // And failing all that above we should have a legitimate JSON diagnostic + // from the compiler, so wrap it in an external Cargo JSON message + // indicating which package it came from and then emit it. + let msg = machine_message::FromCompiler { + package_id, + target, + message: compiler_message, + } + .to_json_string(); + + // Switch json lines from rustc/rustdoc that appear on stderr to stdout + // instead. We want the stdout of Cargo to always be machine parseable as + // stderr has our colorized human-readable messages. + state.stdout(msg); + Ok(()) +} + +fn replay_output_cache( + package_id: PackageId, + target: &Target, + path: PathBuf, + format: MessageFormat, + color: bool, +) -> Work { + let target = target.clone(); + let extract_rendered_messages = match format { + MessageFormat::Human => true, + MessageFormat::Json => false, + // FIXME: short not supported. + MessageFormat::Short => false, + }; + let mut options = OutputOptions { + extract_rendered_messages, + look_for_metadata_directive: false, + color, + cache_cell: None, + }; + Work::new(move |state| { + if !path.exists() { + // No cached output, probably didn't emit anything. 
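+            // (The cache file is only created lazily on the first diagnostic
+            // line, so a unit that compiled without output never wrote one.)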
+            return Ok(());
+        }
+        let contents = fs::read_to_string(&path)?;
+        for line in contents.lines() {
+            on_stderr_line(state, line, package_id, &target, &mut options)?;
+        }
+        Ok(())
+    })
+}
diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs
new file mode 100644
index 00000000000..86cb7b2180d
--- /dev/null
+++ b/src/cargo/core/compiler/output_depinfo.rs
@@ -0,0 +1,136 @@
+use std::collections::{BTreeSet, HashSet};
+use std::fs::File;
+use std::io::{BufWriter, Write};
+use std::path::{Path, PathBuf};
+
+use log::debug;
+
+use super::{fingerprint, Context, FileFlavor, Unit};
+use crate::util::paths;
+use crate::util::{internal, CargoResult};
+
+fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {
+    let path = path.as_ref();
+    let relpath = match basedir {
+        None => path,
+        Some(base) => match path.strip_prefix(base) {
+            Ok(relpath) => relpath,
+            _ => path,
+        },
+    };
+    relpath
+        .to_str()
+        .ok_or_else(|| internal("path not utf-8"))
+        .map(|f| f.replace(" ", "\\ "))
+}
+
+fn add_deps_for_unit<'a, 'b>(
+    deps: &mut BTreeSet<PathBuf>,
+    context: &mut Context<'a, 'b>,
+    unit: &Unit<'a>,
+    visited: &mut HashSet<Unit<'a>>,
+) -> CargoResult<()> {
+    if !visited.insert(*unit) {
+        return Ok(());
+    }
+
+    // units representing the execution of a build script don't actually
+    // generate a dep info file, so we just keep on going below
+    if !unit.mode.is_run_custom_build() {
+        // Add dependencies from rustc dep-info output (stored in fingerprint directory)
+        let dep_info_loc = fingerprint::dep_info_loc(context, unit);
+        if let Some(paths) = fingerprint::parse_dep_info(
+            unit.pkg.root(),
+            context.files().host_root(),
+            &dep_info_loc,
+        )? {
+            for path in paths {
+                deps.insert(path);
+            }
+        } else {
+            debug!(
+                "can't find dep_info for {:?} {}",
+                unit.pkg.package_id(),
+                unit.target
+            );
+            return Err(internal("dep_info missing"));
+        }
+    }
+
+    // Add rerun-if-changed dependencies
+    let key = (unit.pkg.package_id(), unit.kind);
+    if let Some(output) = context.build_state.outputs.lock().unwrap().get(&key) {
+        for path in &output.rerun_if_changed {
+            deps.insert(path.into());
+        }
+    }
+
+    // Recursively traverse all transitive dependencies
+    for dep_unit in context.dep_targets(unit).iter() {
+        let source_id = dep_unit.pkg.package_id().source_id();
+        if source_id.is_path() {
+            add_deps_for_unit(deps, context, dep_unit, visited)?;
+        }
+    }
+    Ok(())
+}
+
+pub fn output_depinfo<'a, 'b>(cx: &mut Context<'a, 'b>, unit: &Unit<'a>) -> CargoResult<()> {
+    let bcx = cx.bcx;
+    let mut deps = BTreeSet::new();
+    let mut visited = HashSet::new();
+    let success = add_deps_for_unit(&mut deps, cx, unit, &mut visited).is_ok();
+    let basedir_string;
+    let basedir = match bcx.config.get_path("build.dep-info-basedir")? {
+        Some(value) => {
+            basedir_string = value
+                .val
+                .as_os_str()
+                .to_str()
+                .ok_or_else(|| internal("build.dep-info-basedir path not utf-8"))?
+                .to_string();
+            Some(basedir_string.as_str())
+        }
+        None => None,
+    };
+    let deps = deps
+        .iter()
+        .map(|f| render_filename(f, basedir))
+        .collect::<CargoResult<Vec<_>>>()?;
+
+    for output in cx
+        .outputs(unit)?
+ .iter() + .filter(|o| o.flavor != FileFlavor::DebugInfo) + { + if let Some(ref link_dst) = output.hardlink { + let output_path = link_dst.with_extension("d"); + if success { + let target_fn = render_filename(link_dst, basedir)?; + + // If nothing changed don't recreate the file which could alter + // its mtime + if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) { + if previous.len() == 1 && previous[0].0 == target_fn && previous[0].1 == deps { + continue; + } + } + + // Otherwise write it all out + let mut outfile = BufWriter::new(File::create(output_path)?); + write!(outfile, "{}:", target_fn)?; + for dep in &deps { + write!(outfile, " {}", dep)?; + } + writeln!(outfile)?; + + // dep-info generation failed, so delete output file. This will + // usually cause the build system to always rerun the build + // rule, which is correct if inefficient. + } else if output_path.exists() { + paths::remove_file(output_path)?; + } + } + } + Ok(()) +} diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs new file mode 100644 index 00000000000..00c9841cc14 --- /dev/null +++ b/src/cargo/core/compiler/unit.rs @@ -0,0 +1,183 @@ +use crate::core::compiler::{CompileMode, Kind}; +use crate::core::{profiles::Profile, Package, Target}; +use crate::util::hex::short_hash; +use std::cell::RefCell; +use std::collections::HashSet; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::ops::Deref; + +/// All information needed to define a unit. +/// +/// A unit is an object that has enough information so that cargo knows how to build it. +/// For example, if your package has dependencies, then every dependency will be built as a library +/// unit. If your package is a library, then it will be built as a library unit as well, or if it +/// is a binary with `main.rs`, then a binary will be output. There are also separate unit types +/// for `test`ing and `check`ing, amongst others. +/// +/// The unit also holds information about all possible metadata about the package in `pkg`. +/// +/// A unit needs to know extra information in addition to the type and root source file. For +/// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know +/// whether you want a debug or release build. There is enough information in this struct to figure +/// all that out. +#[derive(Clone, Copy, PartialOrd, Ord)] +pub struct Unit<'a> { + inner: &'a UnitInner<'a>, +} + +/// Internal fields of `Unit` which `Unit` will dereference to. +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct UnitInner<'a> { + /// Information about available targets, which files to include/exclude, etc. Basically stuff in + /// `Cargo.toml`. + pub pkg: &'a Package, + /// Information about the specific target to build, out of the possible targets in `pkg`. Not + /// to be confused with *target-triple* (or *target architecture* ...), the target arch for a + /// build. + pub target: &'a Target, + /// The profile contains information about *how* the build should be run, including debug + /// level, etc. + pub profile: Profile, + /// Whether this compilation unit is for the host or target architecture. + /// + /// For example, when + /// cross compiling and using a custom build script, the build script needs to be compiled for + /// the host architecture so the host rustc can use it (when compiling to the target + /// architecture). + pub kind: Kind, + /// The "mode" this unit is being compiled for. See [`CompileMode`] for more details. 
+ pub mode: CompileMode, +} + +impl UnitInner<'_> { + /// Returns whether compilation of this unit requires all upstream artifacts + /// to be available. + /// + /// This effectively means that this unit is a synchronization point (if the + /// return value is `true`) that all previously pipelined units need to + /// finish in their entirety before this one is started. + pub fn requires_upstream_objects(&self) -> bool { + self.mode.is_any_test() || self.target.kind().requires_upstream_objects() + } +} + +impl<'a> Unit<'a> { + pub fn buildkey(&self) -> String { + format!("{}-{}", self.pkg.name(), short_hash(self)) + } +} + +// Just hash the pointer for fast hashing +impl<'a> Hash for Unit<'a> { + fn hash(&self, hasher: &mut H) { + (self.inner as *const UnitInner<'a>).hash(hasher) + } +} + +// Just equate the pointer since these are interned +impl<'a> PartialEq for Unit<'a> { + fn eq(&self, other: &Unit<'a>) -> bool { + self.inner as *const UnitInner<'a> == other.inner as *const UnitInner<'a> + } +} + +impl<'a> Eq for Unit<'a> {} + +impl<'a> Deref for Unit<'a> { + type Target = UnitInner<'a>; + + fn deref(&self) -> &UnitInner<'a> { + self.inner + } +} + +impl<'a> fmt::Debug for Unit<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Unit") + .field("pkg", &self.pkg) + .field("target", &self.target) + .field("profile", &self.profile) + .field("kind", &self.kind) + .field("mode", &self.mode) + .finish() + } +} + +/// A small structure used to "intern" `Unit` values. +/// +/// A `Unit` is just a thin pointer to an internal `UnitInner`. This is done to +/// ensure that `Unit` itself is quite small as well as enabling a very +/// efficient hash/equality implementation for `Unit`. All units are +/// manufactured through an interner which guarantees that each equivalent value +/// is only produced once. +pub struct UnitInterner<'a> { + state: RefCell>, +} + +struct InternerState<'a> { + cache: HashSet>>, +} + +impl<'a> UnitInterner<'a> { + /// Creates a new blank interner + pub fn new() -> UnitInterner<'a> { + UnitInterner { + state: RefCell::new(InternerState { + cache: HashSet::new(), + }), + } + } + + /// Creates a new `unit` from its components. The returned `Unit`'s fields + /// will all be equivalent to the provided arguments, although they may not + /// be the exact same instance. + pub fn intern( + &'a self, + pkg: &'a Package, + target: &'a Target, + profile: Profile, + kind: Kind, + mode: CompileMode, + ) -> Unit<'a> { + let inner = self.intern_inner(&UnitInner { + pkg, + target, + profile, + kind, + mode, + }); + Unit { inner } + } + + // Ok so interning here is a little unsafe, hence the usage of `unsafe` + // internally. The primary issue here is that we've got an internal cache of + // `UnitInner` instances added so far, but we may need to mutate it to add + // it, and the mutation for an interner happens behind a shared borrow. + // + // Our goal though is to escape the lifetime `borrow_mut` to the same + // lifetime as the borrowed passed into this function. That's where `unsafe` + // comes into play. What we're subverting here is resizing internally in the + // `HashSet` as well as overwriting previous keys in the `HashSet`. + // + // As a result we store `Box` internally to have an extra layer + // of indirection. That way `*const UnitInner` is a stable address that + // doesn't change with `HashSet` resizing. Furthermore we're careful to + // never overwrite an entry once inserted. 
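+    // Those two properties together mean that a `&UnitInner` handed out once
+    // remains valid for the interner's entire lifetime, which is what the
+    // extended `'a` borrow below claims.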
+ // + // Ideally we'd use an off-the-shelf interner from crates.io which avoids a + // small amount of unsafety here, but at the time this was written one + // wasn't obviously available. + fn intern_inner(&'a self, item: &UnitInner<'a>) -> &'a UnitInner<'a> { + let mut me = self.state.borrow_mut(); + if let Some(item) = me.cache.get(item) { + // note that `item` has type `&Box`. Use `&**` to + // convert that to `&UnitInner<'a>`, then do some trickery to extend + // the lifetime to the `'a` on the function here. + return unsafe { &*(&**item as *const UnitInner<'a>) }; + } + me.cache.insert(Box::new(item.clone())); + let item = me.cache.get(item).unwrap(); + unsafe { &*(&**item as *const UnitInner<'a>) } + } +} diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index aa74490fef3..27859338666 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -1,235 +1,451 @@ +use std::rc::Rc; + +use log::trace; +use semver::ReqParseError; use semver::VersionReq; +use serde::ser; +use serde::Serialize; +use url::Url; -use core::{SourceId, Summary, PackageId}; -use std::rc::Rc; -use util::CargoResult; +use crate::core::interning::InternedString; +use crate::core::{PackageId, SourceId, Summary}; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::{Config, Platform}; -/// The data underlying a Dependency. -#[derive(PartialEq,Clone,Debug)] -pub struct DependencyInner { - name: String, +/// Information about a dependency requested by a Cargo manifest. +/// Cheap to copy. +#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug)] +pub struct Dependency { + inner: Rc, +} + +/// The data underlying a `Dependency`. +#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug)] +struct Inner { + name: InternedString, source_id: SourceId, + /// Source ID for the registry as specified in the manifest. + /// + /// This will be None if it is not specified (crates.io dependency). + /// This is different from `source_id` for example when both a `path` and + /// `registry` is specified. Or in the case of a crates.io dependency, + /// `source_id` will be crates.io and this will be None. + registry_id: Option, req: VersionReq, - specified_req: Option, + specified_req: bool, kind: Kind, only_match_name: bool, + explicit_name_in_toml: Option, optional: bool, + public: bool, default_features: bool, - features: Vec, + features: Vec, // This dependency should be used only for this platform. // `None` means *all platforms*. - only_for_platform: Option, + platform: Option, } -/// Information about a dependency requested by a Cargo manifest. -/// Cheap to copy. -#[derive(PartialEq,Clone,Debug)] -pub struct Dependency { - inner: Rc, +#[derive(Serialize)] +struct SerializedDependency<'a> { + name: &'a str, + source: SourceId, + req: String, + kind: Kind, + rename: Option<&'a str>, + + optional: bool, + uses_default_features: bool, + features: &'a [InternedString], + target: Option<&'a Platform>, + /// The registry URL this dependency is from. + /// If None, then it comes from the default registry (crates.io). 
+ registry: Option, +} + +impl ser::Serialize for Dependency { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + SerializedDependency { + name: &*self.package_name(), + source: self.source_id(), + req: self.version_req().to_string(), + kind: self.kind(), + optional: self.is_optional(), + uses_default_features: self.uses_default_features(), + features: self.features(), + target: self.platform(), + rename: self.explicit_name_in_toml().map(|s| s.as_str()), + registry: self.registry_id().map(|sid| sid.url().clone()), + } + .serialize(s) + } } -#[derive(PartialEq, Clone, Debug, Copy)] +#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug, Copy)] pub enum Kind { Normal, Development, Build, } -impl DependencyInner { +fn parse_req_with_deprecated( + name: &str, + req: &str, + extra: Option<(PackageId, &Config)>, +) -> CargoResult { + match VersionReq::parse(req) { + Err(ReqParseError::DeprecatedVersionRequirement(requirement)) => { + let (inside, config) = match extra { + Some(pair) => pair, + None => return Err(ReqParseError::DeprecatedVersionRequirement(requirement).into()), + }; + let msg = format!( + "\ +parsed version requirement `{}` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of {} {}, and the correct version requirement is `{}`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. +", + req, + inside.name(), + inside.version(), + requirement + ); + config.shell().warn(&msg)?; + + Ok(requirement) + } + Err(e) => { + let err: CargoResult = Err(e.into()); + let v: VersionReq = err.chain_err(|| { + format!( + "failed to parse the version requirement `{}` for dependency `{}`", + req, name + ) + })?; + Ok(v) + } + Ok(v) => Ok(v), + } +} + +impl ser::Serialize for Kind { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + match *self { + Kind::Normal => None, + Kind::Development => Some("dev"), + Kind::Build => Some("build"), + } + .serialize(s) + } +} + +impl Dependency { + /// Attempt to create a `Dependency` from an entry in the manifest. + pub fn parse( + name: &str, + version: Option<&str>, + source_id: SourceId, + inside: PackageId, + config: &Config, + ) -> CargoResult { + let arg = Some((inside, config)); + let (specified_req, version_req) = match version { + Some(v) => (true, parse_req_with_deprecated(name, v, arg)?), + None => (false, VersionReq::any()), + }; + + let mut ret = Dependency::new_override(name, source_id); + { + let ptr = Rc::make_mut(&mut ret.inner); + ptr.only_match_name = false; + ptr.req = version_req; + ptr.specified_req = specified_req; + } + Ok(ret) + } + /// Attempt to create a `Dependency` from an entry in the manifest. - pub fn parse(name: &str, - version: Option<&str>, - source_id: &SourceId) -> CargoResult { - let version_req = match version { - Some(v) => try!(VersionReq::parse(v)), - None => VersionReq::any() + pub fn parse_no_deprecated( + name: &str, + version: Option<&str>, + source_id: SourceId, + ) -> CargoResult { + let (specified_req, version_req) = match version { + Some(v) => (true, parse_req_with_deprecated(name, v, None)?), + None => (false, VersionReq::any()), }; - Ok(DependencyInner { - only_match_name: false, - req: version_req, - specified_req: version.map(|s| s.to_string()), - .. 
DependencyInner::new_override(name, source_id) - }) - } - - pub fn new_override(name: &str, source_id: &SourceId) -> DependencyInner { - DependencyInner { - name: name.to_string(), - source_id: source_id.clone(), - req: VersionReq::any(), - kind: Kind::Normal, - only_match_name: true, - optional: false, - features: Vec::new(), - default_features: true, - specified_req: None, - only_for_platform: None, + let mut ret = Dependency::new_override(name, source_id); + { + let ptr = Rc::make_mut(&mut ret.inner); + ptr.only_match_name = false; + ptr.req = version_req; + ptr.specified_req = specified_req; + } + Ok(ret) + } + + pub fn new_override(name: &str, source_id: SourceId) -> Dependency { + assert!(!name.is_empty()); + Dependency { + inner: Rc::new(Inner { + name: InternedString::new(name), + source_id, + registry_id: None, + req: VersionReq::any(), + kind: Kind::Normal, + only_match_name: true, + optional: false, + public: false, + features: Vec::new(), + default_features: true, + specified_req: false, + platform: None, + explicit_name_in_toml: None, + }), } } - pub fn version_req(&self) -> &VersionReq { &self.req } - pub fn name(&self) -> &str { &self.name } - pub fn source_id(&self) -> &SourceId { &self.source_id } - pub fn kind(&self) -> Kind { self.kind } - pub fn specified_req(&self) -> Option<&str> { - self.specified_req.as_ref().map(|s| &s[..]) + pub fn version_req(&self) -> &VersionReq { + &self.inner.req + } + + /// This is the name of this `Dependency` as listed in `Cargo.toml`. + /// + /// Or in other words, this is what shows up in the `[dependencies]` section + /// on the left hand side. This is *not* the name of the package that's + /// being depended on as the dependency can be renamed. For that use + /// `package_name` below. + /// + /// Both of the dependencies below return `foo` for `name_in_toml`: + /// + /// ```toml + /// [dependencies] + /// foo = "0.1" + /// ``` + /// + /// and ... + /// + /// ```toml + /// [dependencies] + /// foo = { version = "0.1", package = 'bar' } + /// ``` + pub fn name_in_toml(&self) -> InternedString { + self.explicit_name_in_toml().unwrap_or(self.inner.name) + } + + /// The name of the package that this `Dependency` depends on. + /// + /// Usually this is what's written on the left hand side of a dependencies + /// section, but it can also be renamed via the `package` key. + /// + /// Both of the dependencies below return `foo` for `package_name`: + /// + /// ```toml + /// [dependencies] + /// foo = "0.1" + /// ``` + /// + /// and ... + /// + /// ```toml + /// [dependencies] + /// bar = { version = "0.1", package = 'foo' } + /// ``` + pub fn package_name(&self) -> InternedString { + self.inner.name + } + + pub fn source_id(&self) -> SourceId { + self.inner.source_id + } + + pub fn registry_id(&self) -> Option { + self.inner.registry_id + } + + pub fn set_registry_id(&mut self, registry_id: SourceId) -> &mut Dependency { + Rc::make_mut(&mut self.inner).registry_id = Some(registry_id); + self + } + + pub fn kind(&self) -> Kind { + self.inner.kind + } + + pub fn is_public(&self) -> bool { + self.inner.public + } + + /// Sets whether the dependency is public. + pub fn set_public(&mut self, public: bool) -> &mut Dependency { + if public { + // Setting 'public' only makes sense for normal dependencies + assert_eq!(self.kind(), Kind::Normal); + } + Rc::make_mut(&mut self.inner).public = public; + self + } + + pub fn specified_req(&self) -> bool { + self.inner.specified_req } /// If none, this dependencies must be built for all platforms. 
/// If some, it must only be built for the specified platform. - pub fn only_for_platform(&self) -> Option<&str> { - self.only_for_platform.as_ref().map(|s| &s[..]) + pub fn platform(&self) -> Option<&Platform> { + self.inner.platform.as_ref() + } + + /// The renamed name of this dependency, if any. + /// + /// If the `package` key is used in `Cargo.toml` then this returns the same + /// value as `name_in_toml`. + pub fn explicit_name_in_toml(&self) -> Option { + self.inner.explicit_name_in_toml } - pub fn set_kind(mut self, kind: Kind) -> DependencyInner { - self.kind = kind; + pub fn set_kind(&mut self, kind: Kind) -> &mut Dependency { + if self.is_public() { + // Setting 'public' only makes sense for normal dependencies + assert_eq!(kind, Kind::Normal); + } + Rc::make_mut(&mut self.inner).kind = kind; self } /// Sets the list of features requested for the package. - pub fn set_features(mut self, features: Vec) -> DependencyInner { - self.features = features; + pub fn set_features( + &mut self, + features: impl IntoIterator>, + ) -> &mut Dependency { + Rc::make_mut(&mut self.inner).features = features + .into_iter() + .map(|s| InternedString::new(s.as_ref())) + .collect(); self } /// Sets whether the dependency requests default features of the package. - pub fn set_default_features(mut self, default_features: bool) -> DependencyInner { - self.default_features = default_features; + pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency { + Rc::make_mut(&mut self.inner).default_features = default_features; self } /// Sets whether the dependency is optional. - pub fn set_optional(mut self, optional: bool) -> DependencyInner { - self.optional = optional; + pub fn set_optional(&mut self, optional: bool) -> &mut Dependency { + Rc::make_mut(&mut self.inner).optional = optional; + self + } + + /// Sets the source ID for this dependency. + pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency { + Rc::make_mut(&mut self.inner).source_id = id; self } - /// Set the source id for this dependency - pub fn set_source_id(mut self, id: SourceId) -> DependencyInner { - self.source_id = id; + /// Sets the version requirement for this dependency. + pub fn set_version_req(&mut self, req: VersionReq) -> &mut Dependency { + Rc::make_mut(&mut self.inner).req = req; self } - /// Set the version requirement for this dependency - pub fn set_version_req(mut self, req: VersionReq) -> DependencyInner { - self.req = req; + pub fn set_platform(&mut self, platform: Option) -> &mut Dependency { + Rc::make_mut(&mut self.inner).platform = platform; self } - pub fn set_only_for_platform(mut self, platform: Option) - -> DependencyInner { - self.only_for_platform = platform; + pub fn set_explicit_name_in_toml(&mut self, name: &str) -> &mut Dependency { + Rc::make_mut(&mut self.inner).explicit_name_in_toml = Some(InternedString::new(name)); self } - /// Lock this dependency to depending on the specified package id - pub fn lock_to(self, id: &PackageId) -> DependencyInner { - assert_eq!(self.source_id, *id.source_id()); - assert!(self.req.matches(id.version())); + /// Locks this dependency to depending on the specified package ID. 
+ pub fn lock_to(&mut self, id: PackageId) -> &mut Dependency { + assert_eq!(self.inner.source_id, id.source_id()); + assert!(self.inner.req.matches(id.version())); + trace!( + "locking dep from `{}` with `{}` at {} to {}", + self.package_name(), + self.version_req(), + self.source_id(), + id + ); self.set_version_req(VersionReq::exact(id.version())) - .set_source_id(id.source_id().clone()) + .set_source_id(id.source_id()) + } + + /// Returns `true` if this is a "locked" dependency, basically whether it has + /// an exact version req. + pub fn is_locked(&self) -> bool { + // Kind of a hack to figure this out, but it works! + self.inner.req.to_string().starts_with('=') } - /// Returns false if the dependency is only used to build the local package. + /// Returns `false` if the dependency is only used to build the local package. pub fn is_transitive(&self) -> bool { - match self.kind { + match self.inner.kind { Kind::Normal | Kind::Build => true, Kind::Development => false, } } - pub fn is_build(&self) -> bool { - match self.kind { Kind::Build => true, _ => false } - } - pub fn is_optional(&self) -> bool { self.optional } - /// Returns true if the default features of the dependency are requested. - pub fn uses_default_features(&self) -> bool { self.default_features } - /// Returns the list of features that are requested by the dependency. - pub fn features(&self) -> &[String] { &self.features } - - /// Returns true if the package (`sum`) can fulfill this dependency request. - pub fn matches(&self, sum: &Summary) -> bool { - self.matches_id(sum.package_id()) - } - - /// Returns true if the package (`id`) can fulfill this dependency request. - pub fn matches_id(&self, id: &PackageId) -> bool { - self.name == id.name() && - (self.only_match_name || (self.req.matches(id.version()) && - &self.source_id == id.source_id())) - } - pub fn into_dependency(self) -> Dependency { - Dependency {inner: Rc::new(self)} + pub fn is_build(&self) -> bool { + match self.inner.kind { + Kind::Build => true, + _ => false, + } } -} -impl Dependency { - /// Attempt to create a `Dependency` from an entry in the manifest. - pub fn parse(name: &str, - version: Option<&str>, - source_id: &SourceId) -> CargoResult { - DependencyInner::parse(name, version, source_id).map(|di| { - di.into_dependency() - }) + pub fn is_optional(&self) -> bool { + self.inner.optional } - pub fn new_override(name: &str, source_id: &SourceId) -> Dependency { - DependencyInner::new_override(name, source_id).into_dependency() + /// Returns `true` if the default features of the dependency are requested. + pub fn uses_default_features(&self) -> bool { + self.inner.default_features } - - pub fn clone_inner(&self) -> DependencyInner { (*self.inner).clone() } - - pub fn version_req(&self) -> &VersionReq { self.inner.version_req() } - pub fn name(&self) -> &str { self.inner.name() } - pub fn source_id(&self) -> &SourceId { self.inner.source_id() } - pub fn kind(&self) -> Kind { self.inner.kind() } - pub fn specified_req(&self) -> Option<&str> { self.inner.specified_req() } - - /// If none, this dependencies must be built for all platforms. - /// If some, it must only be built for the specified platform. - pub fn only_for_platform(&self) -> Option<&str> { - self.inner.only_for_platform() + /// Returns the list of features that are requested by the dependency. 
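
The setter methods above all funnel through `Rc::make_mut`, which is what makes `Dependency` "cheap to copy": clones are pointer bumps, and the inner data is duplicated only when a shared value is actually mutated. A self-contained sketch of the copy-on-write pattern, with toy types rather than Cargo's:

```rust
use std::rc::Rc;

#[derive(Clone)]
struct Dep {
    inner: Rc<Inner>,
}

#[derive(Clone)]
struct Inner {
    optional: bool,
}

impl Dep {
    // Mirrors the builder-style setters above: mutate through
    // `Rc::make_mut`, which clones `Inner` only if it is shared.
    fn set_optional(&mut self, optional: bool) -> &mut Dep {
        Rc::make_mut(&mut self.inner).optional = optional;
        self
    }
}

fn main() {
    let mut a = Dep { inner: Rc::new(Inner { optional: false }) };
    let b = a.clone(); // shares the same `Inner` allocation
    a.set_optional(true); // triggers the clone; `b` is unaffected
    assert!(a.inner.optional);
    assert!(!b.inner.optional);
}
```
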
+ pub fn features(&self) -> &[InternedString] { + &self.inner.features } - /// Lock this dependency to depending on the specified package id - pub fn lock_to(self, id: &PackageId) -> Dependency { - self.clone_inner().lock_to(id).into_dependency() + /// Returns `true` if the package (`sum`) can fulfill this dependency request. + pub fn matches(&self, sum: &Summary) -> bool { + self.matches_id(sum.package_id()) } - /// Returns false if the dependency is only used to build the local package. - pub fn is_transitive(&self) -> bool { self.inner.is_transitive() } - pub fn is_build(&self) -> bool { self.inner.is_build() } - pub fn is_optional(&self) -> bool { self.inner.is_optional() } - /// Returns true if the default features of the dependency are requested. - pub fn uses_default_features(&self) -> bool { - self.inner.uses_default_features() + /// Returns `true` if the package (`id`) can fulfill this dependency request. + pub fn matches_ignoring_source(&self, id: PackageId) -> bool { + self.package_name() == id.name() && self.version_req().matches(id.version()) } - /// Returns the list of features that are requested by the dependency. - pub fn features(&self) -> &[String] { self.inner.features() } - - /// Returns true if the package (`sum`) can fulfill this dependency request. - pub fn matches(&self, sum: &Summary) -> bool { self.inner.matches(sum) } - /// Returns true if the package (`id`) can fulfill this dependency request. - pub fn matches_id(&self, id: &PackageId) -> bool { - self.inner.matches_id(id) + /// Returns `true` if the package (`id`) can fulfill this dependency request. + pub fn matches_id(&self, id: PackageId) -> bool { + self.inner.name == id.name() + && (self.inner.only_match_name + || (self.inner.req.matches(id.version()) && self.inner.source_id == id.source_id())) } -} -#[derive(PartialEq,Clone,RustcEncodable)] -pub struct SerializedDependency { - name: String, - req: String -} - -impl SerializedDependency { - pub fn from_dependency(dep: &Dependency) -> SerializedDependency { - SerializedDependency { - name: dep.name().to_string(), - req: dep.version_req().to_string() + pub fn map_source(mut self, to_replace: SourceId, replace_with: SourceId) -> Dependency { + if self.source_id() != to_replace { + self + } else { + self.set_source_id(replace_with); + self } } } diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs new file mode 100644 index 00000000000..9fd8161ce35 --- /dev/null +++ b/src/cargo/core/features.rs @@ -0,0 +1,480 @@ +//! Support for nightly features in Cargo itself. +//! +//! This file is the version of `feature_gate.rs` in upstream Rust for Cargo +//! itself and is intended to be the avenue for which new features in Cargo are +//! gated by default and then eventually stabilized. All known stable and +//! unstable features are tracked in this file. +//! +//! If you're reading this then you're likely interested in adding a feature to +//! Cargo, and the good news is that it shouldn't be too hard! To do this you'll +//! want to follow these steps: +//! +//! 1. Add your feature. Do this by searching for "look here" in this file and +//! expanding the macro invocation that lists all features with your new +//! feature. +//! +//! 2. Find the appropriate place to place the feature gate in Cargo itself. If +//! you're extending the manifest format you'll likely just want to modify +//! the `Manifest::feature_gate` function, but otherwise you may wish to +//! place the feature gate elsewhere in Cargo. +//! +//! 3. 
To actually perform the feature gate, you'll want to have code that looks +//! like: +//! +//! ```rust,ignore +//! use core::{Feature, Features}; +//! +//! let feature = Feature::launch_into_space(); +//! package.manifest().features().require(feature).chain_err(|| { +//! "launching Cargo into space right now is unstable and may result in \ +//! unintended damage to your codebase, use with caution" +//! })?; +//! ``` +//! +//! Notably you'll notice the `require` function called with your `Feature`, and +//! then you use `chain_err` to tack on more context for why the feature was +//! required when the feature isn't activated. +//! +//! 4. Update the unstable documentation at +//! `src/doc/src/reference/unstable.md` to include a short description of +//! how to use your new feature. When the feature is stabilized, be sure +//! that the Cargo Guide or Reference is updated to fully document the +//! feature and remove the entry from the Unstable section. +//! +//! And hopefully that's it! Bear with us though that this is, at the time of +//! this writing, a very new feature in Cargo. If the process differs from this +//! we'll be sure to update this documentation! + +use std::cell::Cell; +use std::env; +use std::fmt; +use std::str::FromStr; + +use failure::Error; +use serde::{Deserialize, Serialize}; + +use crate::util::errors::CargoResult; + +pub const SEE_CHANNELS: &str = + "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ + about Rust release channels."; + +/// The edition of the compiler (RFC 2052) +#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)] +pub enum Edition { + /// The 2015 edition + Edition2015, + /// The 2018 edition + Edition2018, +} + +impl fmt::Display for Edition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Edition::Edition2015 => f.write_str("2015"), + Edition::Edition2018 => f.write_str("2018"), + } + } +} +impl FromStr for Edition { + type Err = Error; + fn from_str(s: &str) -> Result { + match s { + "2015" => Ok(Edition::Edition2015), + "2018" => Ok(Edition::Edition2018), + s => failure::bail!( + "supported edition values are `2015` or `2018`, but `{}` \ + is unknown", + s + ), + } + } +} + +#[derive(PartialEq)] +enum Status { + Stable, + Unstable, +} + +macro_rules! features { + ( + pub struct Features { + $([$stab:ident] $feature:ident: bool,)* + } + ) => ( + #[derive(Default, Clone, Debug)] + pub struct Features { + $($feature: bool,)* + activated: Vec, + } + + impl Feature { + $( + pub fn $feature() -> &'static Feature { + fn get(features: &Features) -> bool { + stab!($stab) == Status::Stable || features.$feature + } + static FEAT: Feature = Feature { + name: stringify!($feature), + get, + }; + &FEAT + } + )* + + fn is_enabled(&self, features: &Features) -> bool { + (self.get)(features) + } + } + + impl Features { + fn status(&mut self, feature: &str) -> Option<(&mut bool, Status)> { + if feature.contains("_") { + return None + } + let feature = feature.replace("-", "_"); + $( + if feature == stringify!($feature) { + return Some((&mut self.$feature, stab!($stab))) + } + )* + None + } + } + ) +} + +macro_rules! stab { + (stable) => { + Status::Stable + }; + (unstable) => { + Status::Unstable + }; +} + +// A listing of all features in Cargo. +// +// "look here" +// +// This is the macro that lists all stable and unstable features in Cargo. +// You'll want to add to this macro whenever you add a feature to Cargo, also +// following the directions above. 
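
To make the macro mechanics concrete before the invocation below, here is a hand-expanded sketch (simplified and hypothetical) of roughly what a single `[unstable]` entry generates: a constructor returning a static descriptor whose `get` function consults the activation flag.

```rust
struct Features {
    test_dummy_unstable: bool,
}

struct Feature {
    name: &'static str,
    get: fn(&Features) -> bool,
}

impl Feature {
    fn test_dummy_unstable() -> &'static Feature {
        fn get(features: &Features) -> bool {
            // For a `[stable]` entry this would short-circuit to `true`.
            features.test_dummy_unstable
        }
        static FEAT: Feature = Feature {
            name: "test_dummy_unstable",
            get,
        };
        &FEAT
    }
}

fn main() {
    let features = Features { test_dummy_unstable: true };
    let feature = Feature::test_dummy_unstable();
    assert!((feature.get)(&features));
}
```
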
+// +// Note that all feature names here are valid Rust identifiers, but the `_` +// character is translated to `-` when specified in the `cargo-features` +// manifest entry in `Cargo.toml`. +features! { + pub struct Features { + + // A dummy feature that doesn't actually gate anything, but it's used in + // testing to ensure that we can enable stable features. + [stable] test_dummy_stable: bool, + + // A dummy feature that gates the usage of the `im-a-teapot` manifest + // entry. This is basically just intended for tests. + [unstable] test_dummy_unstable: bool, + + // Downloading packages from alternative registry indexes. + [stable] alternative_registries: bool, + + // Using editions + [stable] edition: bool, + + // Renaming a package in the manifest via the `package` key + [stable] rename_dependency: bool, + + // Whether a lock file is published with this crate + // This is deprecated, and will likely be removed in a future version. + [unstable] publish_lockfile: bool, + + // Overriding profiles for dependencies. + [unstable] profile_overrides: bool, + + // Separating the namespaces for features and dependencies + [unstable] namespaced_features: bool, + + // "default-run" manifest option, + [stable] default_run: bool, + + // Declarative build scripts. + [unstable] metabuild: bool, + + // Specifying the 'public' attribute on dependencies + [unstable] public_dependency: bool, + } +} + +pub struct Feature { + name: &'static str, + get: fn(&Features) -> bool, +} + +impl Features { + pub fn new(features: &[String], warnings: &mut Vec) -> CargoResult { + let mut ret = Features::default(); + for feature in features { + ret.add(feature, warnings)?; + ret.activated.push(feature.to_string()); + } + Ok(ret) + } + + fn add(&mut self, feature: &str, warnings: &mut Vec) -> CargoResult<()> { + let (slot, status) = match self.status(feature) { + Some(p) => p, + None => failure::bail!("unknown cargo feature `{}`", feature), + }; + + if *slot { + failure::bail!("the cargo feature `{}` has already been activated", feature); + } + + match status { + Status::Stable => { + let warning = format!( + "the cargo feature `{}` is now stable \ + and is no longer necessary to be listed \ + in the manifest", + feature + ); + warnings.push(warning); + } + Status::Unstable if !nightly_features_allowed() => failure::bail!( + "the cargo feature `{}` requires a nightly version of \ + Cargo, but this is the `{}` channel\n\ + {}", + feature, + channel(), + SEE_CHANNELS + ), + Status::Unstable => {} + } + + *slot = true; + + Ok(()) + } + + pub fn activated(&self) -> &[String] { + &self.activated + } + + pub fn require(&self, feature: &Feature) -> CargoResult<()> { + if feature.is_enabled(self) { + Ok(()) + } else { + let feature = feature.name.replace("_", "-"); + let mut msg = format!("feature `{}` is required", feature); + + if nightly_features_allowed() { + let s = format!( + "\n\nconsider adding `cargo-features = [\"{0}\"]` \ + to the manifest", + feature + ); + msg.push_str(&s); + } else { + let s = format!( + "\n\n\ + this Cargo does not support nightly features, but if you\n\ + switch to nightly channel you can add\n\ + `cargo-features = [\"{}\"]` to enable this feature", + feature + ); + msg.push_str(&s); + } + failure::bail!("{}", msg); + } + } + + pub fn is_enabled(&self, feature: &Feature) -> bool { + feature.is_enabled(self) + } +} + +/// A parsed representation of all unstable flags that Cargo accepts. 
+/// +/// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for +/// gating unstable functionality to Cargo. These flags are only available on +/// the nightly channel of Cargo. +/// +/// This struct doesn't have quite the same convenience macro that the features +/// have above, but the procedure should still be relatively stable for adding a +/// new unstable flag: +/// +/// 1. First, add a field to this `CliUnstable` structure. All flags are allowed +/// to have a value as the `-Z` flags are either of the form `-Z foo` or +/// `-Z foo=bar`, and it's up to you how to parse `bar`. +/// +/// 2. Add an arm to the match statement in `CliUnstable::add` below to match on +/// your new flag. The key (`k`) is what you're matching on and the value is +/// in `v`. +/// +/// 3. (optional) Add a new parsing function to parse your datatype. As of now +/// there's an example for `bool`, but more can be added! +/// +/// 4. In Cargo use `config.cli_unstable()` to get a reference to this structure +/// and then test for your flag or your value and act accordingly. +/// +/// If you have any trouble with this, please let us know! +#[derive(Default, Debug)] +pub struct CliUnstable { + pub print_im_a_teapot: bool, + pub unstable_options: bool, + pub no_index_update: bool, + pub avoid_dev_deps: bool, + pub minimal_versions: bool, + pub package_features: bool, + pub advanced_env: bool, + pub config_profile: bool, + pub dual_proc_macros: bool, + pub mtime_on_use: bool, + pub install_upgrade: bool, + pub cache_messages: bool, + pub binary_dep_depinfo: bool, +} + +impl CliUnstable { + pub fn parse(&mut self, flags: &[String]) -> CargoResult<()> { + if !flags.is_empty() && !nightly_features_allowed() { + failure::bail!( + "the `-Z` flag is only accepted on the nightly channel of Cargo, \ + but this is the `{}` channel\n\ + {}", + channel(), + SEE_CHANNELS + ); + } + for flag in flags { + self.add(flag)?; + } + Ok(()) + } + + fn add(&mut self, flag: &str) -> CargoResult<()> { + let mut parts = flag.splitn(2, '='); + let k = parts.next().unwrap(); + let v = parts.next(); + + fn parse_bool(value: Option<&str>) -> CargoResult { + match value { + None | Some("yes") => Ok(true), + Some("no") => Ok(false), + Some(s) => failure::bail!("expected `no` or `yes`, found: {}", s), + } + } + + match k { + "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(v)?, + "unstable-options" => self.unstable_options = true, + "no-index-update" => self.no_index_update = true, + "avoid-dev-deps" => self.avoid_dev_deps = true, + "minimal-versions" => self.minimal_versions = true, + "package-features" => self.package_features = true, + "advanced-env" => self.advanced_env = true, + "config-profile" => self.config_profile = true, + "dual-proc-macros" => self.dual_proc_macros = true, + "mtime-on-use" => self.mtime_on_use = true, + "install-upgrade" => self.install_upgrade = true, + "cache-messages" => self.cache_messages = true, + "binary-dep-depinfo" => self.binary_dep_depinfo = true, + _ => failure::bail!("unknown `-Z` flag specified: {}", k), + } + + Ok(()) + } + + /// Generates an error if `-Z unstable-options` was not used. + /// Intended to be used when a user passes a command-line flag that + /// requires `-Z unstable-options`. 
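
The flag handling described in the steps above boils down to a small split-and-dispatch loop. A runnable sketch with one illustrative flag (real Cargo routes errors through `CargoResult` rather than `String`):

```rust
#[derive(Default, Debug)]
struct CliUnstable {
    print_im_a_teapot: bool,
}

fn parse_bool(value: Option<&str>) -> Result<bool, String> {
    match value {
        None | Some("yes") => Ok(true),
        Some("no") => Ok(false),
        Some(s) => Err(format!("expected `no` or `yes`, found: {}", s)),
    }
}

impl CliUnstable {
    // `-Z` flags are either `-Z foo` or `-Z foo=bar`, so split once on
    // `=` and dispatch on the key.
    fn add(&mut self, flag: &str) -> Result<(), String> {
        let mut parts = flag.splitn(2, '=');
        let k = parts.next().unwrap();
        let v = parts.next();
        match k {
            "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(v)?,
            _ => return Err(format!("unknown `-Z` flag specified: {}", k)),
        }
        Ok(())
    }
}

fn main() {
    let mut z = CliUnstable::default();
    z.add("print-im-a-teapot=yes").unwrap();
    assert!(z.print_im_a_teapot);
}
```
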
+ pub fn fail_if_stable_opt(&self, flag: &str, issue: u32) -> CargoResult<()> { + if !self.unstable_options { + let see = format!( + "See https://github.com/rust-lang/cargo/issues/{} for more \ + information about the `{}` flag.", + issue, flag + ); + if nightly_features_allowed() { + failure::bail!( + "the `{}` flag is unstable, pass `-Z unstable-options` to enable it\n\ + {}", + flag, + see + ); + } else { + failure::bail!( + "the `{}` flag is unstable, and only available on the nightly channel \ + of Cargo, but this is the `{}` channel\n\ + {}\n\ + {}", + flag, + channel(), + SEE_CHANNELS, + see + ); + } + } + Ok(()) + } +} + +/// Returns the current release channel ("stable", "beta", "nightly", "dev"). +pub fn channel() -> String { + if let Ok(override_channel) = env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS") { + return override_channel; + } + if let Ok(staging) = env::var("RUSTC_BOOTSTRAP") { + if staging == "1" { + return "dev".to_string(); + } + } + crate::version() + .cfg_info + .map(|c| c.release_channel) + .unwrap_or_else(|| String::from("dev")) +} + +thread_local!( + static NIGHTLY_FEATURES_ALLOWED: Cell = Cell::new(false); + static ENABLE_NIGHTLY_FEATURES: Cell = Cell::new(false); +); + +/// This is a little complicated. +/// This should return false if: +/// - this is an artifact of the rustc distribution process for "stable" or for "beta" +/// - this is an `#[test]` that does not opt in with `enable_nightly_features` +/// - this is a integration test that uses `ProcessBuilder` +/// that does not opt in with `masquerade_as_nightly_cargo` +/// This should return true if: +/// - this is an artifact of the rustc distribution process for "nightly" +/// - this is being used in the rustc distribution process internally +/// - this is a cargo executable that was built from source +/// - this is an `#[test]` that called `enable_nightly_features` +/// - this is a integration test that uses `ProcessBuilder` +/// that called `masquerade_as_nightly_cargo` +pub fn nightly_features_allowed() -> bool { + if ENABLE_NIGHTLY_FEATURES.with(|c| c.get()) { + return true; + } + match &channel()[..] { + "nightly" | "dev" => NIGHTLY_FEATURES_ALLOWED.with(|c| c.get()), + _ => false, + } +} + +/// Allows nightly features to be enabled for this thread, but only if the +/// development channel is nightly or dev. +/// +/// Used by cargo main to ensure that a cargo build from source has nightly features +pub fn maybe_allow_nightly_features() { + NIGHTLY_FEATURES_ALLOWED.with(|c| c.set(true)); +} + +/// Forcibly enables nightly features for this thread. +/// +/// Used by tests to allow the use of nightly features. +pub fn enable_nightly_features() { + ENABLE_NIGHTLY_FEATURES.with(|c| c.set(true)); +} diff --git a/src/cargo/core/interning.rs b/src/cargo/core/interning.rs new file mode 100644 index 00000000000..c5f97504490 --- /dev/null +++ b/src/cargo/core/interning.rs @@ -0,0 +1,107 @@ +use serde::{Serialize, Serializer}; + +use std::borrow::Borrow; +use std::cmp::Ordering; +use std::collections::HashSet; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::ops::Deref; +use std::ptr; +use std::str; +use std::sync::Mutex; + +pub fn leak(s: String) -> &'static str { + Box::leak(s.into_boxed_str()) +} + +lazy_static::lazy_static! 
{ + static ref STRING_CACHE: Mutex> = Mutex::new(HashSet::new()); +} + +#[derive(Clone, Copy)] +pub struct InternedString { + inner: &'static str, +} + +impl PartialEq for InternedString { + fn eq(&self, other: &InternedString) -> bool { + ptr::eq(self.as_str(), other.as_str()) + } +} + +impl Eq for InternedString {} + +impl InternedString { + pub fn new(str: &str) -> InternedString { + let mut cache = STRING_CACHE.lock().unwrap(); + let s = cache.get(str).cloned().unwrap_or_else(|| { + let s = leak(str.to_string()); + cache.insert(s); + s + }); + + InternedString { inner: s } + } + + pub fn as_str(&self) -> &'static str { + self.inner + } +} + +impl Deref for InternedString { + type Target = str; + + fn deref(&self) -> &'static str { + self.as_str() + } +} + +impl Hash for InternedString { + // N.B., we can't implement this as `identity(self).hash(state)`, + // because we use this for on-disk fingerprints and so need + // stability across Cargo invocations. + fn hash(&self, state: &mut H) { + self.as_str().hash(state); + } +} + +impl Borrow for InternedString { + // If we implement Hash as `identity(self).hash(state)`, + // then this will need to be removed. + fn borrow(&self) -> &str { + self.as_str() + } +} + +impl fmt::Debug for InternedString { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(self.as_str(), f) + } +} + +impl fmt::Display for InternedString { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self.as_str(), f) + } +} + +impl Ord for InternedString { + fn cmp(&self, other: &InternedString) -> Ordering { + self.as_str().cmp(other.as_str()) + } +} + +impl PartialOrd for InternedString { + fn partial_cmp(&self, other: &InternedString) -> Option { + Some(self.cmp(other)) + } +} + +impl Serialize for InternedString { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.inner) + } +} diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index cc09b2d120e..2c57c9e4a43 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -1,25 +1,72 @@ -use std::default::Default; -use std::path::{PathBuf, Path}; +use std::collections::{BTreeMap, HashMap}; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::path::{Path, PathBuf}; +use std::rc::Rc; use semver::Version; -use rustc_serialize::{Encoder,Encodable}; - -use core::{Dependency, PackageId, Summary}; -use core::package_id::Metadata; -use core::dependency::SerializedDependency; -use util::{CargoResult, human}; +use serde::ser; +use serde::Serialize; +use url::Url; + +use crate::core::interning::InternedString; +use crate::core::profiles::Profiles; +use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; +use crate::core::{Edition, Feature, Features, WorkspaceConfig}; +use crate::util::errors::*; +use crate::util::toml::TomlManifest; +use crate::util::{short_hash, Config, Filesystem}; + +pub enum EitherManifest { + Real(Manifest), + Virtual(VirtualManifest), +} -/// Contains all the informations about a package, as loaded from a Cargo.toml. +/// Contains all the information about a package, as loaded from a `Cargo.toml`. 
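
The `InternedString` cache above leaks each distinct string once (via `Box::leak`) so it can hand out `&'static str` values and compare them by pointer, while `Hash` still walks the bytes to keep on-disk fingerprints stable across invocations. A self-contained sketch of the core mechanism, substituting std's `OnceLock` for the `lazy_static!` used here:

```rust
use std::collections::HashSet;
use std::sync::{Mutex, OnceLock};

fn cache() -> &'static Mutex<HashSet<&'static str>> {
    static CACHE: OnceLock<Mutex<HashSet<&'static str>>> = OnceLock::new();
    CACHE.get_or_init(|| Mutex::new(HashSet::new()))
}

fn intern(s: &str) -> &'static str {
    let mut cache = cache().lock().unwrap();
    if let Some(&existing) = cache.get(s) {
        return existing;
    }
    // Leak the string so it lives for the rest of the process; interned
    // sets are assumed to be small and long-lived.
    let leaked: &'static str = Box::leak(s.to_string().into_boxed_str());
    cache.insert(leaked);
    leaked
}

fn main() {
    let a = intern("serde");
    let b = intern(&("ser".to_string() + "de"));
    // Equal contents intern to the same allocation, so identity
    // comparison is enough for equality checks.
    assert!(std::ptr::eq(a, b));
}
```
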
#[derive(Clone, Debug)] pub struct Manifest { summary: Summary, targets: Vec, links: Option, - warnings: Vec, + warnings: Warnings, exclude: Vec, include: Vec, metadata: ManifestMetadata, + custom_metadata: Option, profiles: Profiles, + publish: Option>, + publish_lockfile: bool, + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + original: Rc, + features: Features, + edition: Edition, + im_a_teapot: Option, + default_run: Option, + metabuild: Option>, +} + +/// When parsing `Cargo.toml`, some warnings should silenced +/// if the manifest comes from a dependency. `ManifestWarning` +/// allows this delayed emission of warnings. +#[derive(Clone, Debug)] +pub struct DelayedWarning { + pub message: String, + pub is_critical: bool, +} + +#[derive(Clone, Debug)] +pub struct Warnings(Vec); + +#[derive(Clone, Debug)] +pub struct VirtualManifest { + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + profiles: Profiles, + warnings: Warnings, + features: Features, } /// General metadata about a package which is just blindly uploaded to the @@ -34,295 +81,707 @@ pub struct Manifest { pub struct ManifestMetadata { pub authors: Vec, pub keywords: Vec, + pub categories: Vec, pub license: Option, pub license_file: Option, - pub description: Option, // not markdown - pub readme: Option, // file, not contents - pub homepage: Option, // url - pub repository: Option, // url - pub documentation: Option, // url -} - -#[derive(PartialEq,Clone,RustcEncodable)] -pub struct SerializedManifest { - name: String, - version: String, - dependencies: Vec, - targets: Vec, + pub description: Option, // Not in Markdown + pub readme: Option, // File, not contents + pub homepage: Option, // URL + pub repository: Option, // URL + pub documentation: Option, // URL + pub badges: BTreeMap>, + pub links: Option, } -impl Encodable for Manifest { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - SerializedManifest { - name: self.summary.name().to_string(), - version: self.summary.version().to_string(), - dependencies: self.summary.dependencies().iter().map(|d| { - SerializedDependency::from_dependency(d) - }).collect(), - targets: self.targets.clone(), - }.encode(s) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, RustcEncodable, Copy)] +#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum LibKind { Lib, Rlib, Dylib, - StaticLib + ProcMacro, + Other(String), } impl LibKind { - pub fn from_str(string: &str) -> CargoResult { - match string { - "lib" => Ok(LibKind::Lib), - "rlib" => Ok(LibKind::Rlib), - "dylib" => Ok(LibKind::Dylib), - "staticlib" => Ok(LibKind::StaticLib), - _ => Err(human(format!("crate-type \"{}\" was not one of lib|rlib|dylib|staticlib", - string))) - } - } - /// Returns the argument suitable for `--crate-type` to pass to rustc. - pub fn crate_type(&self) -> &'static str { + pub fn crate_type(&self) -> &str { match *self { LibKind::Lib => "lib", LibKind::Rlib => "rlib", LibKind::Dylib => "dylib", - LibKind::StaticLib => "staticlib" + LibKind::ProcMacro => "proc-macro", + LibKind::Other(ref s) => s, + } + } + + pub fn linkable(&self) -> bool { + match *self { + LibKind::Lib | LibKind::Rlib | LibKind::Dylib | LibKind::ProcMacro => true, + LibKind::Other(..) => false, + } + } + + pub fn requires_upstream_objects(&self) -> bool { + match *self { + // "lib" == "rlib" and is a compilation that doesn't actually + // require upstream object files to exist, only upstream metadata + // files. 
As a result, it doesn't require upstream artifacts + LibKind::Lib | LibKind::Rlib => false, + + // Everything else, however, is some form of "linkable output" or + // something that requires upstream object files. + _ => true, + } + } +} + +impl fmt::Debug for LibKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.crate_type().fmt(f) + } +} + +impl<'a> From<&'a String> for LibKind { + fn from(string: &'a String) -> Self { + match string.as_ref() { + "lib" => LibKind::Lib, + "rlib" => LibKind::Rlib, + "dylib" => LibKind::Dylib, + "proc-macro" => LibKind::ProcMacro, + s => LibKind::Other(s.to_string()), } } } -#[derive(Debug, Clone, Hash, PartialEq, RustcEncodable, Eq)] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum TargetKind { Lib(Vec), Bin, Test, Bench, - Example, + ExampleLib(Vec), + ExampleBin, CustomBuild, } -#[derive(RustcEncodable, RustcDecodable, Clone, PartialEq, Eq, Debug, Hash)] -pub struct Profile { - pub opt_level: u32, - pub lto: bool, - pub codegen_units: Option, // None = use rustc default - pub rustc_args: Option>, - pub debuginfo: bool, - pub debug_assertions: bool, - pub rpath: bool, - pub test: bool, - pub doc: bool, +impl ser::Serialize for TargetKind { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + use self::TargetKind::*; + match *self { + Lib(ref kinds) => s.collect_seq(kinds.iter().map(LibKind::crate_type)), + Bin => ["bin"].serialize(s), + ExampleBin | ExampleLib(_) => ["example"].serialize(s), + Test => ["test"].serialize(s), + CustomBuild => ["custom-build"].serialize(s), + Bench => ["bench"].serialize(s), + } + } +} + +impl fmt::Debug for TargetKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + use self::TargetKind::*; + match *self { + Lib(ref kinds) => kinds.fmt(f), + Bin => "bin".fmt(f), + ExampleBin | ExampleLib(_) => "example".fmt(f), + Test => "test".fmt(f), + CustomBuild => "custom-build".fmt(f), + Bench => "bench".fmt(f), + } + } } -#[derive(Default, Clone, Debug)] -pub struct Profiles { - pub release: Profile, - pub dev: Profile, - pub test: Profile, - pub bench: Profile, - pub doc: Profile, +impl TargetKind { + pub fn description(&self) -> &'static str { + match self { + TargetKind::Lib(..) => "lib", + TargetKind::Bin => "bin", + TargetKind::Test => "integration-test", + TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example", + TargetKind::Bench => "bench", + TargetKind::CustomBuild => "build-script", + } + } + + /// Returns whether production of this artifact requires the object files + /// from dependencies to be available. + /// + /// This only returns `false` when all we're producing is an rlib, otherwise + /// it will return `true`. + pub fn requires_upstream_objects(&self) -> bool { + match self { + TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => { + kinds.iter().any(|k| k.requires_upstream_objects()) + } + _ => true, + } + } } -/// Informations about a binary, a library, an example, etc. that is part of the +/// Information about a binary, a library, an example, etc. that is part of the /// package. -#[derive(Clone, Hash, PartialEq, Eq, Debug)] +#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct Target { kind: TargetKind, name: String, - src_path: PathBuf, - metadata: Option, + // Note that the `src_path` here is excluded from the `Hash` implementation + // as it's absolute currently and is otherwise a little too brittle for + // causing rebuilds. 
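
The pipelining rule encoded above is compact enough to restate on its own: only outputs that are purely `lib`/`rlib` can begin compiling against upstream metadata alone; every other crate type needs upstream object files first. An illustrative reduction:

```rust
enum LibKind {
    Lib,
    Rlib,
    Dylib,
    ProcMacro,
}

impl LibKind {
    fn requires_upstream_objects(&self) -> bool {
        // `lib`/`rlib` compilations need only upstream metadata; all
        // other kinds are some form of linkable output.
        !matches!(self, LibKind::Lib | LibKind::Rlib)
    }
}

fn main() {
    assert!(!LibKind::Rlib.requires_upstream_objects());
    assert!(LibKind::Dylib.requires_upstream_objects());
    assert!(LibKind::ProcMacro.requires_upstream_objects());
}
```
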
Instead the hash for the path that we send to the + // compiler is handled elsewhere. + src_path: TargetSourcePath, + required_features: Option>, tested: bool, benched: bool, doc: bool, doctest: bool, harness: bool, // whether to use the test harness (--test) for_host: bool, + proc_macro: bool, + edition: Edition, } -#[derive(RustcEncodable)] -pub struct SerializedTarget { - kind: Vec<&'static str>, - name: String, - src_path: String, - metadata: Option +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] +pub enum TargetSourcePath { + Path(PathBuf), + Metabuild, } -impl Encodable for Target { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let kind = match self.kind { - TargetKind::Lib(ref kinds) => { - kinds.iter().map(|k| k.crate_type()).collect() - } - TargetKind::Bin => vec!("bin"), - TargetKind::Example => vec!["example"], - TargetKind::Test => vec!["test"], - TargetKind::CustomBuild => vec!["custom-build"], - TargetKind::Bench => vec!["bench"], - }; +impl TargetSourcePath { + pub fn path(&self) -> Option<&Path> { + match self { + TargetSourcePath::Path(path) => Some(path.as_ref()), + TargetSourcePath::Metabuild => None, + } + } + + pub fn is_path(&self) -> bool { + match self { + TargetSourcePath::Path(_) => true, + _ => false, + } + } +} +impl Hash for TargetSourcePath { + fn hash(&self, _: &mut H) { + // ... + } +} + +impl fmt::Debug for TargetSourcePath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + TargetSourcePath::Path(path) => path.fmt(f), + TargetSourcePath::Metabuild => "metabuild".fmt(f), + } + } +} + +impl From for TargetSourcePath { + fn from(path: PathBuf) -> Self { + assert!(path.is_absolute(), "`{}` is not absolute", path.display()); + TargetSourcePath::Path(path) + } +} + +#[derive(Serialize)] +struct SerializedTarget<'a> { + /// Is this a `--bin bin`, `--lib`, `--example ex`? + /// Serialized as a list of strings for historical reasons. + kind: &'a TargetKind, + /// Corresponds to `--crate-type` compiler attribute. + /// See https://doc.rust-lang.org/reference/linkage.html + crate_types: Vec<&'a str>, + name: &'a str, + src_path: Option<&'a PathBuf>, + edition: &'a str, + #[serde(rename = "required-features", skip_serializing_if = "Option::is_none")] + required_features: Option>, + doctest: bool, +} + +impl ser::Serialize for Target { + fn serialize(&self, s: S) -> Result { + let src_path = match &self.src_path { + TargetSourcePath::Path(p) => Some(p), + // Unfortunately getting the correct path would require access to + // target_dir, which is not available here. + TargetSourcePath::Metabuild => None, + }; SerializedTarget { - kind: kind, - name: self.name.clone(), - src_path: self.src_path.display().to_string(), - metadata: self.metadata.clone() - }.encode(s) + kind: &self.kind, + crate_types: self.rustc_crate_types(), + name: &self.name, + src_path, + edition: &self.edition.to_string(), + required_features: self + .required_features + .as_ref() + .map(|rf| rf.iter().map(|s| &**s).collect()), + doctest: self.doctest && self.doctestable(), + } + .serialize(s) + } +} + +compact_debug! 
{ + impl fmt::Debug for Target { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let (default, default_name) = { + match &self.kind { + TargetKind::Lib(kinds) => { + ( + Target::lib_target( + &self.name, + kinds.clone(), + self.src_path().path().unwrap().to_path_buf(), + self.edition, + ), + format!("lib_target({:?}, {:?}, {:?}, {:?})", + self.name, kinds, self.src_path, self.edition), + ) + } + TargetKind::CustomBuild => { + match self.src_path { + TargetSourcePath::Path(ref path) => { + ( + Target::custom_build_target( + &self.name, + path.to_path_buf(), + self.edition, + ), + format!("custom_build_target({:?}, {:?}, {:?})", + self.name, path, self.edition), + ) + } + TargetSourcePath::Metabuild => { + ( + Target::metabuild_target(&self.name), + format!("metabuild_target({:?})", self.name), + ) + } + } + } + _ => ( + Target::new(self.src_path.clone(), self.edition), + format!("with_path({:?}, {:?})", self.src_path, self.edition), + ), + } + }; + [debug_the_fields( + kind + name + src_path + required_features + tested + benched + doc + doctest + harness + for_host + proc_macro + edition + )] + } } } impl Manifest { - pub fn new(summary: Summary, targets: Vec, - exclude: Vec, - include: Vec, - links: Option, - metadata: ManifestMetadata, - profiles: Profiles) -> Manifest { + pub fn new( + summary: Summary, + targets: Vec, + exclude: Vec, + include: Vec, + links: Option, + metadata: ManifestMetadata, + custom_metadata: Option, + profiles: Profiles, + publish: Option>, + publish_lockfile: bool, + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + features: Features, + edition: Edition, + im_a_teapot: Option, + default_run: Option, + original: Rc, + metabuild: Option>, + ) -> Manifest { Manifest { - summary: summary, - targets: targets, - warnings: Vec::new(), - exclude: exclude, - include: include, - links: links, - metadata: metadata, - profiles: profiles, - } - } - - pub fn dependencies(&self) -> &[Dependency] { self.summary.dependencies() } - pub fn exclude(&self) -> &[String] { &self.exclude } - pub fn include(&self) -> &[String] { &self.include } - pub fn metadata(&self) -> &ManifestMetadata { &self.metadata } - pub fn name(&self) -> &str { self.package_id().name() } - pub fn package_id(&self) -> &PackageId { self.summary.package_id() } - pub fn summary(&self) -> &Summary { &self.summary } - pub fn targets(&self) -> &[Target] { &self.targets } - pub fn version(&self) -> &Version { self.package_id().version() } - pub fn warnings(&self) -> &[String] { &self.warnings } - pub fn profiles(&self) -> &Profiles { &self.profiles } + summary, + targets, + warnings: Warnings::new(), + exclude, + include, + links, + metadata, + custom_metadata, + profiles, + publish, + replace, + patch, + workspace, + features, + edition, + original, + im_a_teapot, + default_run, + publish_lockfile, + metabuild, + } + } + + pub fn dependencies(&self) -> &[Dependency] { + self.summary.dependencies() + } + pub fn exclude(&self) -> &[String] { + &self.exclude + } + pub fn include(&self) -> &[String] { + &self.include + } + pub fn metadata(&self) -> &ManifestMetadata { + &self.metadata + } + pub fn name(&self) -> InternedString { + self.package_id().name() + } + pub fn package_id(&self) -> PackageId { + self.summary.package_id() + } + pub fn summary(&self) -> &Summary { + &self.summary + } + pub fn summary_mut(&mut self) -> &mut Summary { + &mut self.summary + } + pub fn targets(&self) -> &[Target] { + &self.targets + } + pub fn version(&self) -> &Version { + 
self.package_id().version() + } + pub fn warnings_mut(&mut self) -> &mut Warnings { + &mut self.warnings + } + pub fn warnings(&self) -> &Warnings { + &self.warnings + } + pub fn profiles(&self) -> &Profiles { + &self.profiles + } + pub fn publish(&self) -> &Option> { + &self.publish + } + pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { + &self.replace + } + pub fn original(&self) -> &TomlManifest { + &self.original + } + pub fn patch(&self) -> &HashMap> { + &self.patch + } pub fn links(&self) -> Option<&str> { self.links.as_ref().map(|s| &s[..]) } - pub fn add_warning(&mut self, s: String) { - self.warnings.push(s) + pub fn workspace_config(&self) -> &WorkspaceConfig { + &self.workspace + } + + pub fn features(&self) -> &Features { + &self.features + } + + pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Manifest { + Manifest { + summary: self.summary.map_source(to_replace, replace_with), + ..self + } + } + + pub fn feature_gate(&self) -> CargoResult<()> { + if self.im_a_teapot.is_some() { + self.features + .require(Feature::test_dummy_unstable()) + .chain_err(|| { + failure::format_err!( + "the `im-a-teapot` manifest key is unstable and may \ + not work properly in England" + ) + })?; + } + + Ok(()) + } + + // Just a helper function to test out `-Z` flags on Cargo + pub fn print_teapot(&self, config: &Config) { + if let Some(teapot) = self.im_a_teapot { + if config.cli_unstable().print_im_a_teapot { + println!("im-a-teapot = {}", teapot); + } + } + } + + pub fn edition(&self) -> Edition { + self.edition } - pub fn set_summary(&mut self, summary: Summary) { - self.summary = summary; + pub fn custom_metadata(&self) -> Option<&toml::Value> { + self.custom_metadata.as_ref() + } + + pub fn default_run(&self) -> Option<&str> { + self.default_run.as_ref().map(|s| &s[..]) + } + + pub fn metabuild(&self) -> Option<&Vec> { + self.metabuild.as_ref() + } + + pub fn metabuild_path(&self, target_dir: Filesystem) -> PathBuf { + let hash = short_hash(&self.package_id()); + target_dir + .into_path_unlocked() + .join(".metabuild") + .join(format!("metabuild-{}-{}.rs", self.name(), hash)) + } +} + +impl VirtualManifest { + pub fn new( + replace: Vec<(PackageIdSpec, Dependency)>, + patch: HashMap>, + workspace: WorkspaceConfig, + profiles: Profiles, + features: Features, + ) -> VirtualManifest { + VirtualManifest { + replace, + patch, + workspace, + profiles, + warnings: Warnings::new(), + features, + } + } + + pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { + &self.replace + } + + pub fn patch(&self) -> &HashMap> { + &self.patch + } + + pub fn workspace_config(&self) -> &WorkspaceConfig { + &self.workspace + } + + pub fn profiles(&self) -> &Profiles { + &self.profiles + } + + pub fn warnings_mut(&mut self) -> &mut Warnings { + &mut self.warnings + } + + pub fn warnings(&self) -> &Warnings { + &self.warnings + } + + pub fn features(&self) -> &Features { + &self.features } } impl Target { - fn blank() -> Target { + fn new(src_path: TargetSourcePath, edition: Edition) -> Target { Target { kind: TargetKind::Bin, name: String::new(), - src_path: PathBuf::new(), - metadata: None, + src_path, + required_features: None, doc: false, doctest: false, harness: true, for_host: false, + proc_macro: false, + edition, tested: true, benched: true, } } - pub fn lib_target(name: &str, crate_targets: Vec, - src_path: &Path, - metadata: Metadata) -> Target { + fn with_path(src_path: PathBuf, edition: Edition) -> Target { + Target::new(TargetSourcePath::from(src_path), edition) + } + 
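
The `Target` constructors below all lean on one idiom: start from a shared base value and override only the fields that differ, using struct update syntax (`..`). A toy illustration of the pattern, with a pared-down struct:

```rust
#[derive(Debug)]
struct Target {
    name: String,
    tested: bool,
    benched: bool,
}

impl Target {
    fn base(name: &str) -> Target {
        Target {
            name: name.to_string(),
            tested: true,
            benched: true,
        }
    }

    // Benches opt out of testing but inherit everything else, much like
    // `bench_target` below.
    fn bench_target(name: &str) -> Target {
        Target {
            tested: false,
            ..Target::base(name)
        }
    }
}

fn main() {
    let t = Target::bench_target("my-bench");
    assert!(!t.tested && t.benched);
    println!("{:?}", t);
}
```
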
+ pub fn lib_target( + name: &str, + crate_targets: Vec, + src_path: PathBuf, + edition: Edition, + ) -> Target { Target { kind: TargetKind::Lib(crate_targets), name: name.to_string(), - src_path: src_path.to_path_buf(), - metadata: Some(metadata), doctest: true, doc: true, - ..Target::blank() + ..Target::with_path(src_path, edition) } } - pub fn bin_target(name: &str, src_path: &Path, - metadata: Option) -> Target { + pub fn bin_target( + name: &str, + src_path: PathBuf, + required_features: Option>, + edition: Edition, + ) -> Target { Target { kind: TargetKind::Bin, name: name.to_string(), - src_path: src_path.to_path_buf(), - metadata: metadata, + required_features, doc: true, - ..Target::blank() + ..Target::with_path(src_path, edition) } } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. - pub fn custom_build_target(name: &str, src_path: &Path, - metadata: Option) -> Target { + pub fn custom_build_target(name: &str, src_path: PathBuf, edition: Edition) -> Target { + Target { + kind: TargetKind::CustomBuild, + name: name.to_string(), + for_host: true, + benched: false, + tested: false, + ..Target::with_path(src_path, edition) + } + } + + pub fn metabuild_target(name: &str) -> Target { Target { kind: TargetKind::CustomBuild, name: name.to_string(), - src_path: src_path.to_path_buf(), - metadata: metadata, for_host: true, benched: false, tested: false, - ..Target::blank() + ..Target::new(TargetSourcePath::Metabuild, Edition::Edition2018) } } - pub fn example_target(name: &str, src_path: &Path) -> Target { + pub fn example_target( + name: &str, + crate_targets: Vec, + src_path: PathBuf, + required_features: Option>, + edition: Edition, + ) -> Target { + let kind = if crate_targets.is_empty() + || crate_targets + .iter() + .all(|t| *t == LibKind::Other("bin".into())) + { + TargetKind::ExampleBin + } else { + TargetKind::ExampleLib(crate_targets) + }; + Target { - kind: TargetKind::Example, + kind, name: name.to_string(), - src_path: src_path.to_path_buf(), + required_features, + tested: false, benched: false, - ..Target::blank() + ..Target::with_path(src_path, edition) } } - pub fn test_target(name: &str, src_path: &Path, - metadata: Metadata) -> Target { + pub fn test_target( + name: &str, + src_path: PathBuf, + required_features: Option>, + edition: Edition, + ) -> Target { Target { kind: TargetKind::Test, name: name.to_string(), - src_path: src_path.to_path_buf(), - metadata: Some(metadata), + required_features, benched: false, - ..Target::blank() + ..Target::with_path(src_path, edition) } } - pub fn bench_target(name: &str, src_path: &Path, - metadata: Metadata) -> Target { + pub fn bench_target( + name: &str, + src_path: PathBuf, + required_features: Option>, + edition: Edition, + ) -> Target { Target { kind: TargetKind::Bench, name: name.to_string(), - src_path: src_path.to_path_buf(), - metadata: Some(metadata), + required_features, tested: false, - ..Target::blank() + ..Target::with_path(src_path, edition) } } - pub fn name(&self) -> &str { &self.name } - pub fn crate_name(&self) -> String { self.name.replace("-", "_") } - pub fn src_path(&self) -> &Path { &self.src_path } - pub fn metadata(&self) -> Option<&Metadata> { self.metadata.as_ref() } - pub fn kind(&self) -> &TargetKind { &self.kind } - pub fn tested(&self) -> bool { self.tested } - pub fn harness(&self) -> bool { self.harness } - pub fn documented(&self) -> bool { self.doc } - pub fn for_host(&self) -> bool { self.for_host } - pub fn benched(&self) -> bool { self.benched } - + pub fn name(&self) -> 
&str { + &self.name + } + pub fn crate_name(&self) -> String { + self.name.replace("-", "_") + } + pub fn src_path(&self) -> &TargetSourcePath { + &self.src_path + } + pub fn set_src_path(&mut self, src_path: TargetSourcePath) { + self.src_path = src_path; + } + pub fn required_features(&self) -> Option<&Vec> { + self.required_features.as_ref() + } + pub fn kind(&self) -> &TargetKind { + &self.kind + } + pub fn tested(&self) -> bool { + self.tested + } + pub fn harness(&self) -> bool { + self.harness + } + pub fn documented(&self) -> bool { + self.doc + } + pub fn for_host(&self) -> bool { + self.for_host + } + pub fn proc_macro(&self) -> bool { + self.proc_macro + } + pub fn edition(&self) -> Edition { + self.edition + } + pub fn benched(&self) -> bool { + self.benched + } pub fn doctested(&self) -> bool { - self.doctest && match self.kind { - TargetKind::Lib(ref kinds) => { - kinds.contains(&LibKind::Rlib) || kinds.contains(&LibKind::Lib) - } + self.doctest + } + + pub fn doctestable(&self) -> bool { + match self.kind { + TargetKind::Lib(ref kinds) => kinds + .iter() + .any(|k| *k == LibKind::Rlib || *k == LibKind::Lib || *k == LibKind::ProcMacro), _ => false, } } @@ -334,47 +793,96 @@ impl Target { pub fn is_lib(&self) -> bool { match self.kind { TargetKind::Lib(_) => true, - _ => false + _ => false, } } + pub fn is_dylib(&self) -> bool { + match self.kind { + TargetKind::Lib(ref libs) => libs.iter().any(|l| *l == LibKind::Dylib), + _ => false, + } + } + + pub fn is_cdylib(&self) -> bool { + let libs = match self.kind { + TargetKind::Lib(ref libs) => libs, + _ => return false, + }; + libs.iter().any(|l| match *l { + LibKind::Other(ref s) => s == "cdylib", + _ => false, + }) + } + + /// Returns whether this target produces an artifact which can be linked + /// into a Rust crate. + /// + /// This only returns true for certain kinds of libraries. pub fn linkable(&self) -> bool { match self.kind { - TargetKind::Lib(ref kinds) => { - kinds.iter().any(|k| { - match *k { - LibKind::Lib | LibKind::Rlib | LibKind::Dylib => true, - LibKind::StaticLib => false, - } - }) - } - _ => false + TargetKind::Lib(ref kinds) => kinds.iter().any(|k| k.linkable()), + _ => false, + } + } + + pub fn is_bin(&self) -> bool { + self.kind == TargetKind::Bin + } + + pub fn is_example(&self) -> bool { + match self.kind { + TargetKind::ExampleBin | TargetKind::ExampleLib(..) => true, + _ => false, } } - pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin } - pub fn is_example(&self) -> bool { self.kind == TargetKind::Example } - pub fn is_test(&self) -> bool { self.kind == TargetKind::Test } - pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench } - pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild } + /// Returns `true` if it is a binary or executable example. + /// NOTE: Tests are `false`! + pub fn is_executable(&self) -> bool { + self.is_bin() || self.is_exe_example() + } + + /// Returns `true` if it is an executable example. + pub fn is_exe_example(&self) -> bool { + // Needed for --all-examples in contexts where only runnable examples make sense + match self.kind { + TargetKind::ExampleBin => true, + _ => false, + } + } + + pub fn is_test(&self) -> bool { + self.kind == TargetKind::Test + } + pub fn is_bench(&self) -> bool { + self.kind == TargetKind::Bench + } + pub fn is_custom_build(&self) -> bool { + self.kind == TargetKind::CustomBuild + } /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
- pub fn rustc_crate_types(&self) -> Vec<&'static str> { + pub fn rustc_crate_types(&self) -> Vec<&str> { match self.kind { - TargetKind::Lib(ref kinds) => { - kinds.iter().map(|kind| kind.crate_type()).collect() - }, - TargetKind::CustomBuild | - TargetKind::Bench | - TargetKind::Test | - TargetKind::Example | - TargetKind::Bin => vec!("bin"), + TargetKind::Lib(ref kinds) | TargetKind::ExampleLib(ref kinds) => { + kinds.iter().map(LibKind::crate_type).collect() + } + TargetKind::CustomBuild + | TargetKind::Bench + | TargetKind::Test + | TargetKind::ExampleBin + | TargetKind::Bin => vec!["bin"], } } pub fn can_lto(&self) -> bool { match self.kind { - TargetKind::Lib(ref v) => *v == [LibKind::StaticLib], + TargetKind::Lib(ref v) => { + !v.contains(&LibKind::Rlib) + && !v.contains(&LibKind::Dylib) + && !v.contains(&LibKind::Lib) + } _ => true, } } @@ -395,6 +903,14 @@ impl Target { self.for_host = for_host; self } + pub fn set_proc_macro(&mut self, proc_macro: bool) -> &mut Target { + self.proc_macro = proc_macro; + self + } + pub fn set_edition(&mut self, edition: Edition) -> &mut Target { + self.edition = edition; + self + } pub fn set_harness(&mut self, harness: bool) -> &mut Target { self.harness = harness; self @@ -405,57 +921,41 @@ impl Target { } } -impl Profile { - pub fn default_dev() -> Profile { - Profile { - debuginfo: true, - debug_assertions: true, - ..Profile::default() - } - } - - pub fn default_release() -> Profile { - Profile { - opt_level: 3, - debuginfo: false, - ..Profile::default() +impl fmt::Display for Target { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.kind { + TargetKind::Lib(..) => write!(f, "Target(lib)"), + TargetKind::Bin => write!(f, "Target(bin: {})", self.name), + TargetKind::Test => write!(f, "Target(test: {})", self.name), + TargetKind::Bench => write!(f, "Target(bench: {})", self.name), + TargetKind::ExampleBin | TargetKind::ExampleLib(..) 
=> { + write!(f, "Target(example: {})", self.name) + } + TargetKind::CustomBuild => write!(f, "Target(script)"), } } +} - pub fn default_test() -> Profile { - Profile { - test: true, - ..Profile::default_dev() - } +impl Warnings { + fn new() -> Warnings { + Warnings(Vec::new()) } - pub fn default_bench() -> Profile { - Profile { - test: true, - ..Profile::default_release() - } + pub fn add_warning(&mut self, s: String) { + self.0.push(DelayedWarning { + message: s, + is_critical: false, + }) } - pub fn default_doc() -> Profile { - Profile { - doc: true, - ..Profile::default_dev() - } + pub fn add_critical_warning(&mut self, s: String) { + self.0.push(DelayedWarning { + message: s, + is_critical: true, + }) } -} -impl Default for Profile { - fn default() -> Profile { - Profile { - opt_level: 0, - lto: false, - codegen_units: None, - rustc_args: None, - debuginfo: false, - debug_assertions: false, - rpath: false, - test: false, - doc: false, - } + pub fn warnings(&self) -> &[DelayedWarning] { + &self.0 } } diff --git a/src/cargo/core/mod.rs b/src/cargo/core/mod.rs index b0b0895f0ef..9e285b6c266 100644 --- a/src/cargo/core/mod.rs +++ b/src/cargo/core/mod.rs @@ -1,21 +1,33 @@ -pub use self::dependency::{Dependency, DependencyInner}; -pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles}; +pub use self::dependency::Dependency; +pub use self::features::{ + enable_nightly_features, maybe_allow_nightly_features, nightly_features_allowed, +}; +pub use self::features::{CliUnstable, Edition, Feature, Features}; +pub use self::interning::InternedString; +pub use self::manifest::{EitherManifest, VirtualManifest}; +pub use self::manifest::{LibKind, Manifest, Target, TargetKind}; pub use self::package::{Package, PackageSet}; -pub use self::package_id::{PackageId, Metadata}; +pub use self::package_id::PackageId; pub use self::package_id_spec::PackageIdSpec; pub use self::registry::Registry; pub use self::resolver::Resolve; -pub use self::shell::{Shell, MultiShell, ShellConfig, Verbosity, ColorConfig}; -pub use self::source::{Source, SourceId, SourceMap, SourceSet, GitReference}; -pub use self::summary::Summary; +pub use self::shell::{Shell, Verbosity}; +pub use self::source::{GitReference, Source, SourceId, SourceMap}; +pub use self::summary::{FeatureMap, FeatureValue, Summary}; +pub use self::workspace::{Members, Workspace, WorkspaceConfig, WorkspaceRootConfig}; -pub mod source; -pub mod package; -pub mod package_id; +pub mod compiler; pub mod dependency; +pub mod features; +mod interning; pub mod manifest; +pub mod package; +pub mod package_id; +mod package_id_spec; +pub mod profiles; +pub mod registry; pub mod resolver; -pub mod summary; pub mod shell; -pub mod registry; -mod package_id_spec; +pub mod source; +pub mod summary; +mod workspace; diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index f71f5025e5d..01a9864cec9 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -1,88 +1,248 @@ -use std::fmt::{self, Formatter}; +use std::cell::{Cell, Ref, RefCell, RefMut}; +use std::cmp::Ordering; +use std::collections::{HashMap, HashSet}; +use std::fmt; use std::hash; -use std::slice; +use std::mem; use std::path::{Path, PathBuf}; +use std::time::{Duration, Instant}; + +use bytesize::ByteSize; +use curl::easy::{Easy, HttpVersion}; +use curl::multi::{EasyHandle, Multi}; +use failure::ResultExt; +use lazycell::LazyCell; +use log::{debug, warn}; use semver::Version; +use serde::ser; +use serde::Serialize; -use core::{Dependency, Manifest, 
PackageId, Registry, Target, Summary, Metadata}; -use core::dependency::SerializedDependency; -use util::{CargoResult, graph}; -use rustc_serialize::{Encoder,Encodable}; -use core::source::Source; +use crate::core::interning::InternedString; +use crate::core::source::MaybePackage; +use crate::core::{Dependency, Manifest, PackageId, SourceId, Target}; +use crate::core::{FeatureMap, SourceMap, Summary}; +use crate::ops; +use crate::util::config::PackageCacheLock; +use crate::util::errors::{CargoResult, CargoResultExt, HttpNot200}; +use crate::util::network::Retry; +use crate::util::{self, internal, Config, Progress, ProgressStyle}; -/// Informations about a package that is available somewhere in the file system. +/// Information about a package that is available somewhere in the file system. /// -/// A package is a `Cargo.toml` file, plus all the files that are part of it. -// TODO: Is manifest_path a relic? -#[derive(Clone, Debug)] +/// A package is a `Cargo.toml` file plus all the files that are part of it. +// +// TODO: is `manifest_path` a relic? +#[derive(Clone)] pub struct Package { - // The package's manifest + /// The package's manifest. manifest: Manifest, - // The root of the package + /// The root of the package. manifest_path: PathBuf, } -#[derive(RustcEncodable)] -struct SerializedPackage { - name: String, - version: String, - dependencies: Vec, - targets: Vec, - manifest_path: String, +impl Ord for Package { + fn cmp(&self, other: &Package) -> Ordering { + self.package_id().cmp(&other.package_id()) + } +} + +impl PartialOrd for Package { + fn partial_cmp(&self, other: &Package) -> Option { + Some(self.cmp(other)) + } } -impl Encodable for Package { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let manifest = self.manifest(); - let summary = manifest.summary(); +/// A Package in a form where `Serialize` can be derived. +#[derive(Serialize)] +struct SerializedPackage<'a> { + name: &'a str, + version: &'a Version, + id: PackageId, + license: Option<&'a str>, + license_file: Option<&'a str>, + description: Option<&'a str>, + source: SourceId, + dependencies: &'a [Dependency], + targets: Vec<&'a Target>, + features: &'a FeatureMap, + manifest_path: &'a Path, + metadata: Option<&'a toml::Value>, + authors: &'a [String], + categories: &'a [String], + keywords: &'a [String], + readme: Option<&'a str>, + repository: Option<&'a str>, + edition: &'a str, + links: Option<&'a str>, + #[serde(skip_serializing_if = "Option::is_none")] + metabuild: Option<&'a Vec>, +} + +impl ser::Serialize for Package { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + let summary = self.manifest.summary(); let package_id = summary.package_id(); + let manmeta = self.manifest.metadata(); + let license = manmeta.license.as_ref().map(String::as_ref); + let license_file = manmeta.license_file.as_ref().map(String::as_ref); + let description = manmeta.description.as_ref().map(String::as_ref); + let authors = manmeta.authors.as_ref(); + let categories = manmeta.categories.as_ref(); + let keywords = manmeta.keywords.as_ref(); + let readme = manmeta.readme.as_ref().map(String::as_ref); + let repository = manmeta.repository.as_ref().map(String::as_ref); + // Filter out metabuild targets. They are an internal implementation + // detail that is probably not relevant externally. There's also not a + // real path to show in `src_path`, and this avoids changing the format. 
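
The borrowed `SerializedPackage<'a>` above is a common serde pattern: a view struct of plain references, so serializing a package allocates nothing, with `skip_serializing_if` available to drop absent fields (used for `metabuild` here). A minimal sketch of the pattern, using hypothetical types and assuming the `serde` (with its `derive` feature) and `serde_json` crates:

```rust
use serde::Serialize;

// Hypothetical owner type standing in for a manifest-backed `Package`.
struct Package {
    name: String,
    description: Option<String>,
}

// The borrowed "view": only references into `Package`, and the `None`
// field is omitted from the serialized output entirely.
#[derive(Serialize)]
struct SerializedPackage<'a> {
    name: &'a str,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<&'a str>,
}

fn main() {
    let pkg = Package {
        name: "foo".to_string(),
        description: None,
    };
    let view = SerializedPackage {
        name: &pkg.name,
        description: pkg.description.as_ref().map(String::as_str),
    };
    assert_eq!(serde_json::to_string(&view).unwrap(), r#"{"name":"foo"}"#);
}
```
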
+ let targets: Vec<&Target> = self + .manifest + .targets() + .iter() + .filter(|t| t.src_path().is_path()) + .collect(); SerializedPackage { - name: package_id.name().to_string(), - version: package_id.version().to_string(), - dependencies: summary.dependencies().iter().map(|d| { - SerializedDependency::from_dependency(d) - }).collect(), - targets: manifest.targets().to_vec(), - manifest_path: self.manifest_path.display().to_string() - }.encode(s) + name: &*package_id.name(), + version: package_id.version(), + id: package_id, + license, + license_file, + description, + source: summary.source_id(), + dependencies: summary.dependencies(), + targets, + features: summary.features(), + manifest_path: &self.manifest_path, + metadata: self.manifest.custom_metadata(), + authors, + categories, + keywords, + readme, + repository, + edition: &self.manifest.edition().to_string(), + links: self.manifest.links(), + metabuild: self.manifest.metabuild(), + } + .serialize(s) } } impl Package { - pub fn new(manifest: Manifest, - manifest_path: &Path) -> Package { + /// Creates a package from a manifest and its location. + pub fn new(manifest: Manifest, manifest_path: &Path) -> Package { Package { - manifest: manifest, + manifest, manifest_path: manifest_path.to_path_buf(), } } - pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() } - pub fn manifest(&self) -> &Manifest { &self.manifest } - pub fn manifest_path(&self) -> &Path { &self.manifest_path } - pub fn name(&self) -> &str { self.package_id().name() } - pub fn package_id(&self) -> &PackageId { self.manifest.package_id() } - pub fn root(&self) -> &Path { self.manifest_path.parent().unwrap() } - pub fn summary(&self) -> &Summary { self.manifest.summary() } - pub fn targets(&self) -> &[Target] { self.manifest().targets() } - pub fn version(&self) -> &Version { self.package_id().version() } + /// Gets the manifest dependencies. + pub fn dependencies(&self) -> &[Dependency] { + self.manifest.dependencies() + } + /// Gets the manifest. + pub fn manifest(&self) -> &Manifest { + &self.manifest + } + /// Gets the manifest. + pub fn manifest_mut(&mut self) -> &mut Manifest { + &mut self.manifest + } + /// Gets the path to the manifest. + pub fn manifest_path(&self) -> &Path { + &self.manifest_path + } + /// Gets the name of the package. + pub fn name(&self) -> InternedString { + self.package_id().name() + } + /// Gets the `PackageId` object for the package (fully defines a package). + pub fn package_id(&self) -> PackageId { + self.manifest.package_id() + } + /// Gets the root folder of the package. + pub fn root(&self) -> &Path { + self.manifest_path.parent().unwrap() + } + /// Gets the summary for the package. + pub fn summary(&self) -> &Summary { + self.manifest.summary() + } + /// Gets the targets specified in the manifest. + pub fn targets(&self) -> &[Target] { + self.manifest.targets() + } + /// Gets the current package version. + pub fn version(&self) -> &Version { + self.package_id().version() + } + /// Gets the package authors. + pub fn authors(&self) -> &Vec { + &self.manifest.metadata().authors + } + /// Returns `true` if the package is set to publish. + pub fn publish(&self) -> &Option> { + self.manifest.publish() + } + /// Returns `true` if the package uses a custom build script for any target. 
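
The `to_registry_toml` helper a little further below re-serializes the normalized manifest through `toml::to_string`. A rough standalone illustration of that serialization step, with a hypothetical struct and assuming the `toml` and `serde` crates:

```rust
use serde::Serialize;

// Hypothetical manifest-like struct; any `Serialize` type works here.
#[derive(Serialize)]
struct Manifest {
    name: String,
    version: String,
}

fn main() {
    let m = Manifest {
        name: "foo".to_string(),
        version: "1.0.0".to_string(),
    };
    // `toml::to_string` emits one `key = "value"` line per field.
    let toml = toml::to_string(&m).unwrap();
    assert_eq!(toml, "name = \"foo\"\nversion = \"1.0.0\"\n");
}
```
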
pub fn has_custom_build(&self) -> bool { self.targets().iter().any(|t| t.is_custom_build()) } - pub fn generate_metadata(&self) -> Metadata { - self.package_id().generate_metadata(self.root()) + pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Package { + Package { + manifest: self.manifest.map_source(to_replace, replace_with), + manifest_path: self.manifest_path, + } + } + + pub fn to_registry_toml(&self, config: &Config) -> CargoResult { + let manifest = self.manifest().original().prepare_for_publish(config)?; + let toml = toml::to_string(&manifest)?; + Ok(format!( + "# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO\n\ + #\n\ + # When uploading crates to the registry Cargo will automatically\n\ + # \"normalize\" Cargo.toml files for maximal compatibility\n\ + # with all versions of Cargo and also rewrite `path` dependencies\n\ + # to registry (e.g., crates.io) dependencies\n\ + #\n\ + # If you believe there's an error in this file please file an\n\ + # issue against the rust-lang/cargo repository. If you're\n\ + # editing this file be aware that the upstream Cargo.toml\n\ + # will likely look very different (and much more reasonable)\n\ + \n\ + {}\ + ", + toml + )) + } + + /// Returns if package should include `Cargo.lock`. + pub fn include_lockfile(&self) -> bool { + self.targets().iter().any(|t| t.is_example() || t.is_bin()) } } impl fmt::Display for Package { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.summary().package_id()) } } +impl fmt::Debug for Package { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Package") + .field("id", &self.summary().package_id()) + .field("..", &"..") + .finish() + } +} + impl PartialEq for Package { fn eq(&self, other: &Package) -> bool { self.package_id() == other.package_id() @@ -93,86 +253,739 @@ impl Eq for Package {} impl hash::Hash for Package { fn hash(&self, into: &mut H) { - // We want to be sure that a path-based package showing up at the same - // location always has the same hash. To that effect we don't hash the - // vanilla package ID if we're a path, but instead feed in our own root - // path. - if self.package_id().source_id().is_path() { - (0, self.root(), self.name(), self.package_id().version()).hash(into) - } else { - (1, self.package_id()).hash(into) - } + self.package_id().hash(into) } } -#[derive(PartialEq,Clone,Debug)] -pub struct PackageSet { - packages: Vec, +/// A set of packages, with the intent to download. +/// +/// This is primarily used to convert a set of `PackageId`s to `Package`s. It +/// will download as needed, or used the cached download if available. +pub struct PackageSet<'cfg> { + packages: HashMap>, + sources: RefCell>, + config: &'cfg Config, + multi: Multi, + /// Used to prevent reusing the PackageSet to download twice. + downloading: Cell, + /// Whether or not to use curl HTTP/2 multiplexing. + multiplexing: bool, } -impl PackageSet { - pub fn new(packages: &[Package]) -> PackageSet { - //assert!(packages.len() > 0, - // "PackageSet must be created with at least one package") - PackageSet { packages: packages.to_vec() } +/// Helper for downloading crates. +pub struct Downloads<'a, 'cfg> { + set: &'a PackageSet<'cfg>, + /// When a download is started, it is added to this map. The key is a + /// "token" (see `Download::token`). It is removed once the download is + /// finished. + pending: HashMap, EasyHandle)>, + /// Set of packages currently being downloaded. 
This should stay in sync + /// with `pending`. + pending_ids: HashSet, + /// The final result of each download. A pair `(token, result)`. This is a + /// temporary holding area, needed because curl can report multiple + /// downloads at once, but the main loop (`wait`) is written to only + /// handle one at a time. + results: Vec<(usize, Result<(), curl::Error>)>, + /// The next ID to use for creating a token (see `Download::token`). + next: usize, + /// Progress bar. + progress: RefCell>>, + /// Number of downloads that have successfully finished. + downloads_finished: usize, + /// Total bytes for all successfully downloaded packages. + downloaded_bytes: u64, + /// Size (in bytes) and package name of the largest downloaded package. + largest: (u64, String), + /// Time when downloading started. + start: Instant, + /// Indicates *all* downloads were successful. + success: bool, + + /// Timeout management, both of timeout thresholds as well as whether or not + /// our connection has timed out (and accompanying message if it has). + /// + /// Note that timeout management is done manually here instead of in libcurl + /// because we want to apply timeouts to an entire batch of operations, not + /// any one particular single operation. + timeout: ops::HttpTimeout, + /// Last time bytes were received. + updated_at: Cell, + /// This is a slow-speed check. It is reset to `now + timeout_duration` + /// every time at least `threshold` bytes are received. If the current + /// time ever exceeds `next_speed_check`, then give up and report a + /// timeout error. + next_speed_check: Cell, + /// This is the slow-speed threshold byte count. It starts at the + /// configured threshold value (default 10), and is decremented by the + /// number of bytes received in each chunk. If it is <= zero, the + /// threshold has been met and data is being received fast enough not to + /// trigger a timeout; reset `next_speed_check` and set this back to the + /// configured threshold. + next_speed_check_bytes_threshold: Cell, + /// Global filesystem lock to ensure only one Cargo is downloading at a + /// time. + _lock: PackageCacheLock<'cfg>, +} + +struct Download<'cfg> { + /// The token for this download, used as the key of the `Downloads::pending` map + /// and stored in `EasyHandle` as well. + token: usize, + + /// The package that we're downloading. + id: PackageId, + + /// Actual downloaded data, updated throughout the lifetime of this download. + data: RefCell>, + + /// The URL that we're downloading from, cached here for error messages and + /// reenqueuing. + url: String, + + /// A descriptive string to print when we've finished downloading this crate. + descriptor: String, + + /// Statistics updated from the progress callback in libcurl. + total: Cell, + current: Cell, + + /// The moment we started this transfer at. + start: Instant, + timed_out: Cell>, + + /// Logic used to track retrying this download if it's a spurious failure. + retry: Retry<'cfg>, +} + +impl<'cfg> PackageSet<'cfg> { + pub fn new( + package_ids: &[PackageId], + sources: SourceMap<'cfg>, + config: &'cfg Config, + ) -> CargoResult> { + // We've enabled the `http2` feature of `curl` in Cargo, so treat + // failures here as fatal as it would indicate a build-time problem. + // + // Note that the multiplexing support is pretty new so we're having it + // off-by-default temporarily. + // + // Also note that pipelining is disabled as curl authors have indicated + // that it's buggy, and we've empirically seen that it's buggy with HTTP + // proxies. 
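
A minimal runnable sketch of the `Multi` configuration this comment block describes (assumes the `curl` crate; error handling shortened to `unwrap`):

```rust
use curl::multi::Multi;

fn main() {
    let mut multi = Multi::new();
    // First flag: HTTP/1.1 pipelining, kept off (buggy with some proxies).
    // Second flag: HTTP/2 multiplexing, many transfers over one connection.
    multi.pipelining(false, true).unwrap();
    // Cap per-host connections so the registry isn't flooded.
    multi.set_max_host_connections(2).unwrap();
}
```
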
+ let mut multi = Multi::new(); + let multiplexing = config + .get::>("http.multiplexing")? + .unwrap_or(true); + multi + .pipelining(false, multiplexing) + .chain_err(|| "failed to enable multiplexing/pipelining in curl")?; + + // let's not flood crates.io with connections + multi.set_max_host_connections(2)?; + + Ok(PackageSet { + packages: package_ids + .iter() + .map(|&id| (id, LazyCell::new())) + .collect(), + sources: RefCell::new(sources), + config, + multi, + downloading: Cell::new(false), + multiplexing, + }) } - pub fn len(&self) -> usize { - self.packages.len() + pub fn package_ids<'a>(&'a self) -> impl Iterator + 'a { + self.packages.keys().cloned() } - pub fn pop(&mut self) -> Package { - self.packages.pop().expect("PackageSet.pop: empty set") + pub fn enable_download<'a>(&'a self) -> CargoResult> { + assert!(!self.downloading.replace(true)); + let timeout = ops::HttpTimeout::new(self.config)?; + Ok(Downloads { + start: Instant::now(), + set: self, + next: 0, + pending: HashMap::new(), + pending_ids: HashSet::new(), + results: Vec::new(), + progress: RefCell::new(Some(Progress::with_style( + "Downloading", + ProgressStyle::Ratio, + self.config, + ))), + downloads_finished: 0, + downloaded_bytes: 0, + largest: (0, String::new()), + success: false, + updated_at: Cell::new(Instant::now()), + timeout, + next_speed_check: Cell::new(Instant::now()), + next_speed_check_bytes_threshold: Cell::new(0), + _lock: self.config.acquire_package_cache_lock()?, + }) } - /// Get a package by name out of the set - pub fn get(&self, name: &str) -> &Package { - self.packages.iter().find(|pkg| name == pkg.name()) - .expect("PackageSet.get: empty set") + pub fn get_one(&self, id: PackageId) -> CargoResult<&Package> { + Ok(self.get_many(Some(id))?.remove(0)) } - pub fn get_all(&self, names: &[&str]) -> Vec<&Package> { - names.iter().map(|name| self.get(*name) ).collect() + pub fn get_many(&self, ids: impl IntoIterator) -> CargoResult> { + let mut pkgs = Vec::new(); + let mut downloads = self.enable_download()?; + for id in ids { + pkgs.extend(downloads.start(id)?); + } + while downloads.remaining() > 0 { + pkgs.push(downloads.wait()?); + } + downloads.success = true; + Ok(pkgs) } - pub fn packages(&self) -> &[Package] { &self.packages } + pub fn sources(&self) -> Ref<'_, SourceMap<'cfg>> { + self.sources.borrow() + } - // For now, assume that the package set contains only one package with a - // given name - pub fn sort(&self) -> Option { - let mut graph = graph::Graph::new(); + pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'cfg>> { + self.sources.borrow_mut() + } +} + +// When dynamically linked against libcurl, we want to ignore some failures +// when using old versions that don't support certain features. +macro_rules! try_old_curl { + ($e:expr, $msg:expr) => { + let result = $e; + if cfg!(target_os = "macos") { + if let Err(e) = result { + warn!("ignoring libcurl {} error: {}", $msg, e); + } + } else { + result.with_context(|_| { + failure::format_err!("failed to enable {}, is curl not built right?", $msg) + })?; + } + }; +} - for pkg in self.packages.iter() { - let deps: Vec<&str> = pkg.dependencies().iter() - .map(|dep| dep.name()) - .collect(); +impl<'a, 'cfg> Downloads<'a, 'cfg> { + /// Starts to download the package for the `id` specified. + /// + /// Returns `None` if the package is queued up for download and will + /// eventually be returned from `wait_for_download`. Returns `Some(pkg)` if + /// the package is ready and doesn't need to be downloaded. 
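
Sketched with hypothetical stand-in types, the start/wait protocol this doc comment describes drives downloads as below (Cargo's real versions return `CargoResult` and hand out borrowed `Package`s from the set):

```rust
struct Downloader {
    queued: Vec<String>,
}

impl Downloader {
    /// Either returns the package immediately (already cached) or queues it.
    fn start(&mut self, id: &str) -> Option<String> {
        if id.starts_with("cached-") {
            Some(id.to_string())
        } else {
            self.queued.push(id.to_string());
            None
        }
    }

    fn remaining(&self) -> usize {
        self.queued.len()
    }

    /// Blocks until one queued download finishes (simulated here).
    fn wait(&mut self) -> String {
        self.queued.pop().expect("no remaining downloads")
    }
}

fn main() {
    // The same drive loop `get_many` uses above: start everything, then
    // collect finished downloads until none remain.
    let mut dl = Downloader { queued: Vec::new() };
    let mut pkgs: Vec<String> = ["cached-a", "b", "c"]
        .iter()
        .filter_map(|id| dl.start(id))
        .collect();
    while dl.remaining() > 0 {
        pkgs.push(dl.wait());
    }
    assert_eq!(pkgs.len(), 3);
}
```
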
+ pub fn start(&mut self, id: PackageId) -> CargoResult> { + Ok(self + .start_inner(id) + .chain_err(|| format!("failed to download `{}`", id))?) + } - graph.add(pkg.name(), &deps); + fn start_inner(&mut self, id: PackageId) -> CargoResult> { + // First up see if we've already cached this package, in which case + // there's nothing to do. + let slot = self + .set + .packages + .get(&id) + .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?; + if let Some(pkg) = slot.borrow() { + return Ok(Some(pkg)); } - let pkgs = match graph.sort() { - Some(pkgs) => pkgs, - None => return None, + // Ask the original source fo this `PackageId` for the corresponding + // package. That may immediately come back and tell us that the package + // is ready, or it could tell us that it needs to be downloaded. + let mut sources = self.set.sources.borrow_mut(); + let source = sources + .get_mut(id.source_id()) + .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?; + let pkg = source + .download(id) + .chain_err(|| failure::format_err!("unable to get packages from source"))?; + let (url, descriptor) = match pkg { + MaybePackage::Ready(pkg) => { + debug!("{} doesn't need a download", id); + assert!(slot.fill(pkg).is_ok()); + return Ok(Some(slot.borrow().unwrap())); + } + MaybePackage::Download { url, descriptor } => (url, descriptor), }; - let pkgs = pkgs.iter().map(|name| { - self.get(*name).clone() - }).collect(); - Some(PackageSet { - packages: pkgs - }) + // Ok we're going to download this crate, so let's set up all our + // internal state and hand off an `Easy` handle to our libcurl `Multi` + // handle. This won't actually start the transfer, but later it'll + // happen during `wait_for_download` + let token = self.next; + self.next += 1; + debug!("downloading {} as {}", id, token); + assert!(self.pending_ids.insert(id)); + + let (mut handle, _timeout) = ops::http_handle_and_timeout(self.set.config)?; + handle.get(true)?; + handle.url(&url)?; + handle.follow_location(true)?; // follow redirects + + // Enable HTTP/2 to be used as it'll allow true multiplexing which makes + // downloads much faster. + // + // Currently Cargo requests the `http2` feature of the `curl` crate + // which means it should always be built in. On OSX, however, we ship + // cargo still linked against the system libcurl. Building curl with + // ALPN support for HTTP/2 requires newer versions of OSX (the + // SecureTransport API) than we want to ship Cargo for. By linking Cargo + // against the system libcurl then older curl installations won't use + // HTTP/2 but newer ones will. All that to basically say we ignore + // errors here on OSX, but consider this a fatal error to not activate + // HTTP/2 on all other platforms. + if self.set.multiplexing { + try_old_curl!(handle.http_version(HttpVersion::V2), "HTTP2"); + } else { + handle.http_version(HttpVersion::V11)?; + } + + // This is an option to `libcurl` which indicates that if there's a + // bunch of parallel requests to the same host they all wait until the + // pipelining status of the host is known. This means that we won't + // initiate dozens of connections to crates.io, but rather only one. + // Once the main one is opened we realized that pipelining is possible + // and multiplexing is possible with static.crates.io. All in all this + // reduces the number of connections done to a more manageable state. 
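
The HTTP/2 negotiation described a few lines up reduces to a try-then-fall-back on the `Easy` handle. A standalone sketch, assuming the `curl` crate (the `try_old_curl!` macro above is stricter: it only tolerates the failure on macOS and treats it as fatal elsewhere):

```rust
use curl::easy::{Easy, HttpVersion};

fn main() {
    let mut handle = Easy::new();
    // Prefer HTTP/2 for multiplexing. An old system libcurl built without
    // ALPN/HTTP2 support returns an error; degrade to HTTP/1.1 instead.
    if handle.http_version(HttpVersion::V2).is_err() {
        handle.http_version(HttpVersion::V11).unwrap();
    }
}
```
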
+ try_old_curl!(handle.pipewait(true), "pipewait"); + + handle.write_function(move |buf| { + debug!("{} - {} bytes of data", token, buf.len()); + tls::with(|downloads| { + if let Some(downloads) = downloads { + downloads.pending[&token] + .0 + .data + .borrow_mut() + .extend_from_slice(buf); + } + }); + Ok(buf.len()) + })?; + + handle.progress(true)?; + handle.progress_function(move |dl_total, dl_cur, _, _| { + tls::with(|downloads| match downloads { + Some(d) => d.progress(token, dl_total as u64, dl_cur as u64), + None => false, + }) + })?; + + // If the progress bar isn't enabled then it may be awhile before the + // first crate finishes downloading so we inform immediately that we're + // downloading crates here. + if self.downloads_finished == 0 + && self.pending.is_empty() + && !self.progress.borrow().as_ref().unwrap().is_enabled() + { + self.set + .config + .shell() + .status("Downloading", "crates ...")?; + } + + let dl = Download { + token, + data: RefCell::new(Vec::new()), + id, + url, + descriptor, + total: Cell::new(0), + current: Cell::new(0), + start: Instant::now(), + timed_out: Cell::new(None), + retry: Retry::new(self.set.config)?, + }; + self.enqueue(dl, handle)?; + self.tick(WhyTick::DownloadStarted)?; + + Ok(None) + } + + /// Returns the number of crates that are still downloading. + pub fn remaining(&self) -> usize { + self.pending.len() + } + + /// Blocks the current thread waiting for a package to finish downloading. + /// + /// This method will wait for a previously enqueued package to finish + /// downloading and return a reference to it after it's done downloading. + /// + /// # Panics + /// + /// This function will panic if there are no remaining downloads. + pub fn wait(&mut self) -> CargoResult<&'a Package> { + let (dl, data) = loop { + assert_eq!(self.pending.len(), self.pending_ids.len()); + let (token, result) = self.wait_for_curl()?; + debug!("{} finished with {:?}", token, result); + + let (mut dl, handle) = self + .pending + .remove(&token) + .expect("got a token for a non-in-progress transfer"); + let data = mem::replace(&mut *dl.data.borrow_mut(), Vec::new()); + let mut handle = self.set.multi.remove(handle)?; + self.pending_ids.remove(&dl.id); + + // Check if this was a spurious error. If it was a spurious error + // then we want to re-enqueue our request for another attempt and + // then we wait for another request to finish. + let ret = { + let timed_out = &dl.timed_out; + let url = &dl.url; + dl.retry + .r#try(|| { + if let Err(e) = result { + // If this error is "aborted by callback" then that's + // probably because our progress callback aborted due to + // a timeout. We'll find out by looking at the + // `timed_out` field, looking for a descriptive message. + // If one is found we switch the error code (to ensure + // it's flagged as spurious) and then attach our extra + // information to the error. + if !e.is_aborted_by_callback() { + return Err(e.into()); + } + + return Err(match timed_out.replace(None) { + Some(msg) => { + let code = curl_sys::CURLE_OPERATION_TIMEDOUT; + let mut err = curl::Error::new(code); + err.set_extra(msg); + err + } + None => e, + } + .into()); + } + + let code = handle.response_code()?; + if code != 200 && code != 0 { + let url = handle.effective_url()?.unwrap_or(url); + return Err(HttpNot200 { + code, + url: url.to_string(), + } + .into()); + } + Ok(()) + }) + .chain_err(|| format!("failed to download from `{}`", dl.url))? 
+ }; + match ret { + Some(()) => break (dl, data), + None => { + self.pending_ids.insert(dl.id); + self.enqueue(dl, handle)? + } + } + }; + + // If the progress bar isn't enabled then we still want to provide some + // semblance of progress of how we're downloading crates, and if the + // progress bar is enabled this provides a good log of what's happening. + self.progress.borrow_mut().as_mut().unwrap().clear(); + self.set + .config + .shell() + .status("Downloaded", &dl.descriptor)?; + + self.downloads_finished += 1; + self.downloaded_bytes += dl.total.get(); + if dl.total.get() > self.largest.0 { + self.largest = (dl.total.get(), dl.id.name().to_string()); + } + + // We're about to synchronously extract the crate below. While we're + // doing that our download progress won't actually be updated, nor do we + // have a great view into the progress of the extraction. Let's prepare + // the user for this CPU-heavy step if it looks like it'll take some + // time to do so. + if dl.total.get() < ByteSize::kb(400).0 { + self.tick(WhyTick::DownloadFinished)?; + } else { + self.tick(WhyTick::Extracting(&dl.id.name()))?; + } + + // Inform the original source that the download is finished which + // should allow us to actually get the package and fill it in now. + let mut sources = self.set.sources.borrow_mut(); + let source = sources + .get_mut(dl.id.source_id()) + .ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?; + let start = Instant::now(); + let pkg = source.finish_download(dl.id, data)?; + + // Assume that no time has passed while we were calling + // `finish_download`, update all speed checks and timeout limits of all + // active downloads to make sure they don't fire because of a slowly + // extracted tarball. + let finish_dur = start.elapsed(); + self.updated_at.set(self.updated_at.get() + finish_dur); + self.next_speed_check + .set(self.next_speed_check.get() + finish_dur); + + let slot = &self.set.packages[&dl.id]; + assert!(slot.fill(pkg).is_ok()); + Ok(slot.borrow().unwrap()) + } + + fn enqueue(&mut self, dl: Download<'cfg>, handle: Easy) -> CargoResult<()> { + let mut handle = self.set.multi.add(handle)?; + let now = Instant::now(); + handle.set_token(dl.token)?; + self.updated_at.set(now); + self.next_speed_check.set(now + self.timeout.dur); + self.next_speed_check_bytes_threshold + .set(u64::from(self.timeout.low_speed_limit)); + dl.timed_out.set(None); + dl.current.set(0); + dl.total.set(0); + self.pending.insert(dl.token, (dl, handle)); + Ok(()) + } + + /// Block, waiting for curl. Returns a token and a `Result` for that token + /// (`Ok` means the download successfully finished). + fn wait_for_curl(&mut self) -> CargoResult<(usize, Result<(), curl::Error>)> { + // This is the main workhorse loop. We use libcurl's portable `wait` + // method to actually perform blocking. This isn't necessarily too + // efficient in terms of fd management, but we should only be juggling + // a few anyway. + // + // Here we start off by asking the `multi` handle to do some work via + // the `perform` method. This will actually do I/O work (non-blocking) + // and attempt to make progress. Afterwards we ask about the `messages` + // contained in the handle which will inform us if anything has finished + // transferring. + // + // If we've got a finished transfer after all that work we break out + // and process the finished transfer at the end. Otherwise we need to + // actually block waiting for I/O to happen, which we achieve with the + // `wait` method on `multi`. 
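
A stripped-down skeleton of that drive/harvest/block cycle (assumes the `curl` crate; with no transfers enqueued it exits after one pass):

```rust
use std::time::Duration;

use curl::multi::Multi;

fn main() {
    let multi = Multi::new();
    loop {
        // Non-blocking: make as much I/O progress as currently possible.
        let remaining = multi.perform().unwrap();
        // Harvest completion messages for transfers that just finished.
        multi.messages(|msg| {
            let _ = msg.token();
        });
        if remaining == 0 {
            break;
        }
        // Block until curl reports that some handle is ready for more I/O.
        multi.wait(&mut [], Duration::from_secs(5)).unwrap();
    }
}
```
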
+ loop { + let n = tls::set(self, || { + self.set + .multi + .perform() + .chain_err(|| "failed to perform http requests") + })?; + debug!("handles remaining: {}", n); + let results = &mut self.results; + let pending = &self.pending; + self.set.multi.messages(|msg| { + let token = msg.token().expect("failed to read token"); + let handle = &pending[&token].1; + if let Some(result) = msg.result_for(handle) { + results.push((token, result)); + } else { + debug!("message without a result (?)"); + } + }); + + if let Some(pair) = results.pop() { + break Ok(pair); + } + assert!(!self.pending.is_empty()); + let timeout = self + .set + .multi + .get_timeout()? + .unwrap_or_else(|| Duration::new(5, 0)); + self.set + .multi + .wait(&mut [], timeout) + .chain_err(|| "failed to wait on curl `Multi`")?; + } + } + + fn progress(&self, token: usize, total: u64, cur: u64) -> bool { + let dl = &self.pending[&token].0; + dl.total.set(total); + let now = Instant::now(); + if cur != dl.current.get() { + let delta = cur - dl.current.get(); + let threshold = self.next_speed_check_bytes_threshold.get(); + + dl.current.set(cur); + self.updated_at.set(now); + + if delta >= threshold { + self.next_speed_check.set(now + self.timeout.dur); + self.next_speed_check_bytes_threshold + .set(u64::from(self.timeout.low_speed_limit)); + } else { + self.next_speed_check_bytes_threshold.set(threshold - delta); + } + } + if self.tick(WhyTick::DownloadUpdate).is_err() { + return false; + } + + // If we've spent too long not actually receiving any data we time out. + if now - self.updated_at.get() > self.timeout.dur { + self.updated_at.set(now); + let msg = format!( + "failed to download any data for `{}` within {}s", + dl.id, + self.timeout.dur.as_secs() + ); + dl.timed_out.set(Some(msg)); + return false; + } + + // If we reached the point in time that we need to check our speed + // limit, see if we've transferred enough data during this threshold. If + // it fails this check then we fail because the download is going too + // slowly. + if now >= self.next_speed_check.get() { + self.next_speed_check.set(now + self.timeout.dur); + assert!(self.next_speed_check_bytes_threshold.get() > 0); + let msg = format!( + "download of `{}` failed to transfer more \ + than {} bytes in {}s", + dl.id, + self.timeout.low_speed_limit, + self.timeout.dur.as_secs() + ); + dl.timed_out.set(Some(msg)); + return false; + } + + true } - pub fn iter(&self) -> slice::Iter { - self.packages.iter() + fn tick(&self, why: WhyTick<'_>) -> CargoResult<()> { + let mut progress = self.progress.borrow_mut(); + let progress = progress.as_mut().unwrap(); + + if let WhyTick::DownloadUpdate = why { + if !progress.update_allowed() { + return Ok(()); + } + } + let pending = self.pending.len(); + let mut msg = if pending == 1 { + format!("{} crate", pending) + } else { + format!("{} crates", pending) + }; + match why { + WhyTick::Extracting(krate) => { + msg.push_str(&format!(", extracting {} ...", krate)); + } + _ => { + let mut dur = Duration::new(0, 0); + let mut remaining = 0; + for (dl, _) in self.pending.values() { + dur += dl.start.elapsed(); + // If the total/current look weird just throw out the data + // point, sounds like curl has more to learn before we have + // the true information. 
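
The slow-speed bookkeeping in `progress` above, reduced to a standalone sketch: receiving at least the threshold number of bytes pushes the deadline out a full window, while crossing the deadline first counts as a timeout (hypothetical struct, simplified from the fields documented on `Downloads`):

```rust
use std::time::{Duration, Instant};

struct SpeedCheck {
    dur: Duration,        // length of one measurement window
    low_speed_limit: u64, // bytes that must arrive per window
    next_check: Instant,  // deadline for the current window
    bytes_left: u64,      // bytes still owed within the current window
}

impl SpeedCheck {
    fn on_data(&mut self, now: Instant, bytes: u64) {
        if bytes >= self.bytes_left {
            // Fast enough: reset the window and the byte budget.
            self.next_check = now + self.dur;
            self.bytes_left = self.low_speed_limit;
        } else {
            self.bytes_left -= bytes;
        }
    }

    fn timed_out(&self, now: Instant) -> bool {
        now >= self.next_check
    }
}

fn main() {
    let start = Instant::now();
    let mut check = SpeedCheck {
        dur: Duration::from_secs(30),
        low_speed_limit: 10,
        next_check: start + Duration::from_secs(30),
        bytes_left: 10,
    };
    check.on_data(start, 4); // under budget: deadline unchanged
    check.on_data(start, 6); // budget met: deadline pushed out
    assert!(!check.timed_out(start));
}
```
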
+ if dl.total.get() >= dl.current.get() { + remaining += dl.total.get() - dl.current.get(); + } + } + if remaining > 0 && dur > Duration::from_millis(500) { + msg.push_str(&format!(", remaining bytes: {}", ByteSize(remaining))); + } + } + } + progress.print_now(&msg) } } -impl Registry for PackageSet { - fn query(&mut self, name: &Dependency) -> CargoResult> { - Ok(self.packages.iter() - .filter(|pkg| name.name() == pkg.name()) - .map(|pkg| pkg.summary().clone()) - .collect()) +#[derive(Copy, Clone)] +enum WhyTick<'a> { + DownloadStarted, + DownloadUpdate, + DownloadFinished, + Extracting(&'a str), +} + +impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> { + fn drop(&mut self) { + self.set.downloading.set(false); + let progress = self.progress.get_mut().take().unwrap(); + // Don't print a download summary if we're not using a progress bar, + // we've already printed lots of `Downloading...` items. + if !progress.is_enabled() { + return; + } + // If we didn't download anything, no need for a summary. + if self.downloads_finished == 0 { + return; + } + // If an error happened, let's not clutter up the output. + if !self.success { + return; + } + // pick the correct plural of crate(s) + let crate_string = if self.downloads_finished == 1 { + "crate" + } else { + "crates" + }; + let mut status = format!( + "{} {} ({}) in {}", + self.downloads_finished, + crate_string, + ByteSize(self.downloaded_bytes), + util::elapsed(self.start.elapsed()) + ); + // print the size of largest crate if it was >1mb + // however don't print if only a single crate was downloaded + // because it is obvious that it will be the largest then + if self.largest.0 > ByteSize::mb(1).0 && self.downloads_finished > 1 { + status.push_str(&format!( + " (largest was `{}` at {})", + self.largest.1, + ByteSize(self.largest.0), + )); + } + // Clear progress before displaying final summary. + drop(progress); + drop(self.set.config.shell().status("Downloaded", status)); + } +} + +mod tls { + use std::cell::Cell; + + use super::Downloads; + + thread_local!(static PTR: Cell = Cell::new(0)); + + pub(crate) fn with(f: impl FnOnce(Option<&Downloads<'_, '_>>) -> R) -> R { + let ptr = PTR.with(|p| p.get()); + if ptr == 0 { + f(None) + } else { + unsafe { f(Some(&*(ptr as *const Downloads<'_, '_>))) } + } + } + + pub(crate) fn set(dl: &Downloads<'_, '_>, f: impl FnOnce() -> R) -> R { + struct Reset<'a, T: Copy>(&'a Cell, T); + + impl<'a, T: Copy> Drop for Reset<'a, T> { + fn drop(&mut self) { + self.0.set(self.1); + } + } + + PTR.with(|p| { + let _reset = Reset(p, p.get()); + p.set(dl as *const Downloads<'_, '_> as usize); + f() + }) } } diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index 25bfc2796ad..208c3ba3e1b 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -1,199 +1,261 @@ -use std::cmp::Ordering; -use std::error::Error; +use std::collections::HashSet; use std::fmt::{self, Formatter}; -use std::hash::Hash; use std::hash; +use std::hash::Hash; use std::path::Path; -use std::sync::Arc; +use std::ptr; +use std::sync::Mutex; -use regex::Regex; -use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use semver; +use serde::de; +use serde::ser; -use util::{CargoResult, CargoError, short_hash, ToSemver}; -use core::source::SourceId; +use crate::core::interning::InternedString; +use crate::core::source::SourceId; +use crate::util::{CargoResult, ToSemver}; + +lazy_static::lazy_static! 
{ + static ref PACKAGE_ID_CACHE: Mutex> = + Mutex::new(HashSet::new()); +} /// Identifier for a specific version of a package in a specific source. -#[derive(Clone, Debug)] +#[derive(Clone, Copy, Eq, PartialOrd, Ord)] pub struct PackageId { - inner: Arc, + inner: &'static PackageIdInner, } -#[derive(PartialEq, PartialOrd, Eq, Ord, Debug)] +#[derive(PartialOrd, Eq, Ord)] struct PackageIdInner { - name: String, + name: InternedString, version: semver::Version, source_id: SourceId, } -impl Encodable for PackageId { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let source = self.inner.source_id.to_url(); - let encoded = format!("{} {} ({})", self.inner.name, self.inner.version, - source); - encoded.encode(s) +// Custom equality that uses full equality of SourceId, rather than its custom equality. +impl PartialEq for PackageIdInner { + fn eq(&self, other: &Self) -> bool { + self.name == other.name + && self.version == other.version + && self.source_id.full_eq(other.source_id) } } -impl Decodable for PackageId { - fn decode(d: &mut D) -> Result { - let string: String = try!(Decodable::decode(d)); - let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap(); - let captures = regex.captures(&string).expect("invalid serialized PackageId"); - - let name = captures.at(1).unwrap(); - let version = captures.at(2).unwrap(); - let url = captures.at(3).unwrap(); - let version = semver::Version::parse(version).ok().expect("invalid version"); - let source_id = SourceId::from_url(url.to_string()); +// Custom hash that is coherent with the custom equality above. +impl Hash for PackageIdInner { + fn hash(&self, into: &mut S) { + self.name.hash(into); + self.version.hash(into); + self.source_id.full_hash(into); + } +} - Ok(PackageId { - inner: Arc::new(PackageIdInner { - name: name.to_string(), - version: version, - source_id: source_id, - }), - }) +impl ser::Serialize for PackageId { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + s.collect_str(&format_args!( + "{} {} ({})", + self.inner.name, + self.inner.version, + self.inner.source_id.into_url() + )) } } -impl Hash for PackageId { - fn hash(&self, state: &mut S) { - self.inner.name.hash(state); - self.inner.version.hash(state); - self.inner.source_id.hash(state); +impl<'de> de::Deserialize<'de> for PackageId { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + let string = String::deserialize(d)?; + let mut s = string.splitn(3, ' '); + let name = s.next().unwrap(); + let name = InternedString::new(name); + let version = match s.next() { + Some(s) => s, + None => return Err(de::Error::custom("invalid serialized PackageId")), + }; + let version = version.to_semver().map_err(de::Error::custom)?; + let url = match s.next() { + Some(s) => s, + None => return Err(de::Error::custom("invalid serialized PackageId")), + }; + let url = if url.starts_with('(') && url.ends_with(')') { + &url[1..url.len() - 1] + } else { + return Err(de::Error::custom("invalid serialized PackageId")); + }; + let source_id = SourceId::from_url(url).map_err(de::Error::custom)?; + + Ok(PackageId::pure(name, version, source_id)) } } impl PartialEq for PackageId { fn eq(&self, other: &PackageId) -> bool { - (*self.inner).eq(&*other.inner) - } -} -impl PartialOrd for PackageId { - fn partial_cmp(&self, other: &PackageId) -> Option { - (*self.inner).partial_cmp(&*other.inner) - } -} -impl Eq for PackageId {} -impl Ord for PackageId { - fn cmp(&self, other: &PackageId) -> Ordering { - (*self.inner).cmp(&*other.inner) + if 
ptr::eq(self.inner, other.inner) { + return true; + } + self.inner.name == other.inner.name + && self.inner.version == other.inner.version + && self.inner.source_id == other.inner.source_id } } -#[derive(Clone, Debug, PartialEq)] -pub enum PackageIdError { - InvalidVersion(String), - InvalidNamespace(String) +impl Hash for PackageId { + fn hash(&self, state: &mut S) { + self.inner.name.hash(state); + self.inner.version.hash(state); + self.inner.source_id.hash(state); + } } -impl Error for PackageIdError { - fn description(&self) -> &str { "failed to parse package id" } -} +impl PackageId { + pub fn new(name: &str, version: T, sid: SourceId) -> CargoResult { + let v = version.to_semver()?; + Ok(PackageId::pure(InternedString::new(name), v, sid)) + } -impl fmt::Display for PackageIdError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - PackageIdError::InvalidVersion(ref v) => { - write!(f, "invalid version: {}", *v) - } - PackageIdError::InvalidNamespace(ref ns) => { - write!(f, "invalid namespace: {}", *ns) - } - } + pub fn pure(name: InternedString, version: semver::Version, source_id: SourceId) -> PackageId { + let inner = PackageIdInner { + name, + version, + source_id, + }; + let mut cache = PACKAGE_ID_CACHE.lock().unwrap(); + let inner = cache.get(&inner).cloned().unwrap_or_else(|| { + let inner = Box::leak(Box::new(inner)); + cache.insert(inner); + inner + }); + PackageId { inner } } -} -impl CargoError for PackageIdError { - fn is_human(&self) -> bool { true } -} + pub fn name(self) -> InternedString { + self.inner.name + } + pub fn version(self) -> &'static semver::Version { + &self.inner.version + } + pub fn source_id(self) -> SourceId { + self.inner.source_id + } -impl From for Box { - fn from(t: PackageIdError) -> Box { Box::new(t) } -} + pub fn with_precise(self, precise: Option) -> PackageId { + PackageId::pure( + self.inner.name, + self.inner.version.clone(), + self.inner.source_id.with_precise(precise), + ) + } -#[derive(PartialEq, Eq, Hash, Clone, RustcEncodable, Debug)] -pub struct Metadata { - pub metadata: String, - pub extra_filename: String -} + pub fn with_source_id(self, source: SourceId) -> PackageId { + PackageId::pure(self.inner.name, self.inner.version.clone(), source) + } -impl PackageId { - pub fn new(name: &str, version: T, - sid: &SourceId) -> CargoResult { - let v = try!(version.to_semver().map_err(PackageIdError::InvalidVersion)); - Ok(PackageId { - inner: Arc::new(PackageIdInner { - name: name.to_string(), - version: v, - source_id: sid.clone(), - }), - }) - } - - pub fn name(&self) -> &str { &self.inner.name } - pub fn version(&self) -> &semver::Version { &self.inner.version } - pub fn source_id(&self) -> &SourceId { &self.inner.source_id } - - pub fn generate_metadata(&self, source_root: &Path) -> Metadata { - // See comments in Package::hash for why we have this test - let metadata = if self.inner.source_id.is_path() { - short_hash(&(0, &self.inner.name, &self.inner.version, source_root)) + pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Self { + if self.source_id() == to_replace { + self.with_source_id(replace_with) } else { - short_hash(&(1, self)) - }; - let extra_filename = format!("-{}", metadata); - - Metadata { metadata: metadata, extra_filename: extra_filename } + self + } } - pub fn with_precise(&self, precise: Option) -> PackageId { - PackageId { - inner: Arc::new(PackageIdInner { - name: self.inner.name.to_string(), - version: self.inner.version.clone(), - source_id: 
self.inner.source_id.with_precise(precise), - }), - } + pub fn stable_hash(self, workspace: &Path) -> PackageIdStableHash<'_> { + PackageIdStableHash(self, workspace) } } -impl Metadata { - pub fn mix(&mut self, t: &T) { - let new_metadata = short_hash(&(&self.metadata, t)); - self.extra_filename = format!("-{}", new_metadata); - self.metadata = new_metadata; +pub struct PackageIdStableHash<'a>(PackageId, &'a Path); + +impl<'a> Hash for PackageIdStableHash<'a> { + fn hash(&self, state: &mut S) { + self.0.inner.name.hash(state); + self.0.inner.version.hash(state); + self.0.inner.source_id.stable_hash(self.1, state); } } impl fmt::Display for PackageId { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - try!(write!(f, "{} v{}", self.inner.name, self.inner.version)); + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{} v{}", self.inner.name, self.inner.version)?; if !self.inner.source_id.is_default_registry() { - try!(write!(f, " ({})", self.inner.source_id)); + write!(f, " ({})", self.inner.source_id)?; } Ok(()) } } +impl fmt::Debug for PackageId { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("PackageId") + .field("name", &self.inner.name) + .field("version", &self.inner.version.to_string()) + .field("source", &self.inner.source_id.to_string()) + .finish() + } +} + #[cfg(test)] mod tests { use super::PackageId; - use core::source::SourceId; - use sources::RegistrySource; - use util::ToUrl; + use crate::core::source::SourceId; + use crate::sources::CRATES_IO_INDEX; + use crate::util::IntoUrl; #[test] fn invalid_version_handled_nicely() { - let loc = RegistrySource::default_url().to_url().unwrap(); - let repo = SourceId::for_registry(&loc); + let loc = CRATES_IO_INDEX.into_url().unwrap(); + let repo = SourceId::for_registry(&loc).unwrap(); + + assert!(PackageId::new("foo", "1.0", repo).is_err()); + assert!(PackageId::new("foo", "1", repo).is_err()); + assert!(PackageId::new("foo", "bar", repo).is_err()); + assert!(PackageId::new("foo", "", repo).is_err()); + } + + #[test] + fn debug() { + let loc = CRATES_IO_INDEX.into_url().unwrap(); + let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap(); + assert_eq!(r#"PackageId { name: "foo", version: "1.0.0", source: "registry `https://github.com/rust-lang/crates.io-index`" }"#, format!("{:?}", pkg_id)); + + let expected = r#" +PackageId { + name: "foo", + version: "1.0.0", + source: "registry `https://github.com/rust-lang/crates.io-index`", +} +"# + .trim(); + + // Can be removed once trailing commas in Debug have reached the stable + // channel. 
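
The compiler behavior that comment waits on, in miniature: pretty-printed `Debug` output gained trailing commas, so for a while tests had to accept both renderings.

```rust
#[derive(Debug)]
struct Foo {
    name: &'static str,
}

fn main() {
    let actual = format!("{:#?}", Foo { name: "foo" });
    // Newer compilers render `name: "foo",` with a trailing comma inside
    // the braces; older ones omit it. Assert only on the stable prefix.
    assert!(actual.starts_with("Foo {"));
}
```
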
+ let expected_without_trailing_comma = r#" +PackageId { + name: "foo", + version: "1.0.0", + source: "registry `https://github.com/rust-lang/crates.io-index`" +} +"# + .trim(); + + let actual = format!("{:#?}", pkg_id); + if actual.ends_with(",\n}") { + assert_eq!(actual, expected); + } else { + assert_eq!(actual, expected_without_trailing_comma); + } + } - assert!(PackageId::new("foo", "1.0", &repo).is_err()); - assert!(PackageId::new("foo", "1", &repo).is_err()); - assert!(PackageId::new("foo", "bar", &repo).is_err()); - assert!(PackageId::new("foo", "", &repo).is_err()); + #[test] + fn display() { + let loc = CRATES_IO_INDEX.into_url().unwrap(); + let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap(); + assert_eq!("foo v1.0.0", pkg_id.to_string()); } } diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs index a950e6006bc..9fd01cf1984 100644 --- a/src/cargo/core/package_id_spec.rs +++ b/src/cargo/core/package_id_spec.rs @@ -1,170 +1,283 @@ +use std::collections::HashMap; use std::fmt; + use semver::Version; -use url::{self, Url, UrlParser}; +use serde::{de, ser}; +use url::Url; -use core::PackageId; -use util::{CargoResult, ToUrl, human, ToSemver, ChainError}; +use crate::core::interning::InternedString; +use crate::core::PackageId; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::{validate_package_name, IntoUrl, ToSemver}; -#[derive(Clone, PartialEq, Eq, Debug)] +/// Some or all of the data required to identify a package: +/// +/// 1. the package name (a `String`, required) +/// 2. the package version (a `Version`, optional) +/// 3. the package source (a `Url`, optional) +/// +/// If any of the optional fields are omitted, then the package ID may be ambiguous, there may be +/// more than one package/version/url combo that will match. However, often just the name is +/// sufficient to uniquely define a package ID. +#[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)] pub struct PackageIdSpec { - name: String, + name: InternedString, version: Option, url: Option, } impl PackageIdSpec { + /// Parses a spec string and returns a `PackageIdSpec` if the string was valid. + /// + /// # Examples + /// Some examples of valid strings + /// + /// ``` + /// use cargo::core::PackageIdSpec; + /// + /// let specs = vec![ + /// "https://crates.io/foo#1.2.3", + /// "https://crates.io/foo#bar:1.2.3", + /// "crates.io/foo", + /// "crates.io/foo#1.2.3", + /// "crates.io/foo#bar", + /// "crates.io/foo#bar:1.2.3", + /// "foo", + /// "foo:1.2.3", + /// ]; + /// for spec in specs { + /// assert!(PackageIdSpec::parse(spec).is_ok()); + /// } pub fn parse(spec: &str) -> CargoResult { - if spec.contains("/") { - match spec.to_url() { - Ok(url) => return PackageIdSpec::from_url(url), - Err(..) => {} + if spec.contains('/') { + if let Ok(url) = spec.into_url() { + return PackageIdSpec::from_url(url); } if !spec.contains("://") { - match url(&format!("cargo://{}", spec)) { - Ok(url) => return PackageIdSpec::from_url(url), - Err(..) 
=> {} + if let Ok(url) = Url::parse(&format!("cargo://{}", spec)) { + return PackageIdSpec::from_url(url); } } } let mut parts = spec.splitn(2, ':'); let name = parts.next().unwrap(); let version = match parts.next() { - Some(version) => Some(try!(Version::parse(version).map_err(human))), + Some(version) => Some(version.to_semver()?), None => None, }; - for ch in name.chars() { - if !ch.is_alphanumeric() && ch != '_' && ch != '-' { - return Err(human(format!("invalid character in pkgid `{}`: `{}`", - spec, ch))) - } - } + validate_package_name(name, "pkgid", "")?; Ok(PackageIdSpec { - name: name.to_string(), - version: version, + name: InternedString::new(name), + version, url: None, }) } - pub fn from_package_id(package_id: &PackageId) -> PackageIdSpec { + /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)` + pub fn query_str(spec: &str, i: I) -> CargoResult + where + I: IntoIterator, + { + let spec = PackageIdSpec::parse(spec) + .chain_err(|| failure::format_err!("invalid package ID specification: `{}`", spec))?; + spec.query(i) + } + + /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `Version` and `Url` + /// fields filled in. + pub fn from_package_id(package_id: PackageId) -> PackageIdSpec { PackageIdSpec { - name: package_id.name().to_string(), + name: package_id.name(), version: Some(package_id.version().clone()), url: Some(package_id.source_id().url().clone()), } } + /// Tries to convert a valid `Url` to a `PackageIdSpec`. fn from_url(mut url: Url) -> CargoResult { - if url.query.is_some() { - return Err(human(format!("cannot have a query string in a pkgid: {}", - url))); + if url.query().is_some() { + failure::bail!("cannot have a query string in a pkgid: {}", url) } - let frag = url.fragment.take(); + let frag = url.fragment().map(|s| s.to_owned()); + url.set_fragment(None); let (name, version) = { - let path = try!(url.path().chain_error(|| { - human(format!("pkgid urls must have a path: {}", url)) - })); - let path_name = try!(path.last().chain_error(|| { - human(format!("pkgid urls must have at least one path \ - component: {}", url)) - })); + let mut path = url + .path_segments() + .ok_or_else(|| failure::format_err!("pkgid urls must have a path: {}", url))?; + let path_name = path.next_back().ok_or_else(|| { + failure::format_err!( + "pkgid urls must have at least one path \ + component: {}", + url + ) + })?; match frag { Some(fragment) => { let mut parts = fragment.splitn(2, ':'); let name_or_version = parts.next().unwrap(); match parts.next() { Some(part) => { - let version = try!(part.to_semver().map_err(human)); - (name_or_version.to_string(), Some(version)) + let version = part.to_semver()?; + (InternedString::new(name_or_version), Some(version)) } None => { - if name_or_version.chars().next().unwrap() - .is_alphabetic() { - (name_or_version.to_string(), None) + if name_or_version.chars().next().unwrap().is_alphabetic() { + (InternedString::new(name_or_version), None) } else { - let version = try!(name_or_version.to_semver() - .map_err(human)); - (path_name.to_string(), Some(version)) + let version = name_or_version.to_semver()?; + (InternedString::new(path_name), Some(version)) } } } } - None => (path_name.to_string(), None), + None => (InternedString::new(path_name), None), } }; Ok(PackageIdSpec { - name: name, - version: version, + name, + version, url: Some(url), }) } - pub fn name(&self) -> &str { &self.name } - pub fn version(&self) -> Option<&Version> { self.version.as_ref() } - pub fn url(&self) -> Option<&Url> { 
self.url.as_ref() } + pub fn name(&self) -> InternedString { + self.name + } + + pub fn version(&self) -> Option<&Version> { + self.version.as_ref() + } - pub fn matches(&self, package_id: &PackageId) -> bool { - if self.name() != package_id.name() { return false } + pub fn url(&self) -> Option<&Url> { + self.url.as_ref() + } - match self.version { - Some(ref v) => if v != package_id.version() { return false }, - None => {} + pub fn set_url(&mut self, url: Url) { + self.url = Some(url); + } + + /// Checks whether the given `PackageId` matches the `PackageIdSpec`. + pub fn matches(&self, package_id: PackageId) -> bool { + if self.name() != package_id.name() { + return false; + } + + if let Some(ref v) = self.version { + if v != package_id.version() { + return false; + } } match self.url { Some(ref u) => u == package_id.source_id().url(), - None => true + None => true, } } -} -fn url(s: &str) -> url::ParseResult { - return UrlParser::new().scheme_type_mapper(mapper).parse(s); + /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or + /// more are found, then this returns an error. + pub fn query(&self, i: I) -> CargoResult + where + I: IntoIterator, + { + let mut ids = i.into_iter().filter(|p| self.matches(*p)); + let ret = match ids.next() { + Some(id) => id, + None => failure::bail!( + "package ID specification `{}` \ + matched no packages", + self + ), + }; + return match ids.next() { + Some(other) => { + let mut msg = format!( + "There are multiple `{}` packages in \ + your project, and the specification \ + `{}` is ambiguous.\n\ + Please re-run this command \ + with `-p ` where `` is one \ + of the following:", + self.name(), + self + ); + let mut vec = vec![ret, other]; + vec.extend(ids); + minimize(&mut msg, &vec, self); + Err(failure::format_err!("{}", msg)) + } + None => Ok(ret), + }; - fn mapper(scheme: &str) -> url::SchemeType { - if scheme == "cargo" { - url::SchemeType::Relative(1) - } else { - url::whatwg_scheme_type_mapper(scheme) + fn minimize(msg: &mut String, ids: &[PackageId], spec: &PackageIdSpec) { + let mut version_cnt = HashMap::new(); + for id in ids { + *version_cnt.entry(id.version()).or_insert(0) += 1; + } + for id in ids { + if version_cnt[id.version()] == 1 { + msg.push_str(&format!("\n {}:{}", spec.name(), id.version())); + } else { + msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id))); + } + } } } - } impl fmt::Display for PackageIdSpec { - #[allow(deprecated)] // connect => join in 1.3 - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut printed_name = false; match self.url { Some(ref url) => { - if url.scheme == "cargo" { - try!(write!(f, "{}/{}", url.host().unwrap(), - url.path().unwrap().connect("/"))); + if url.scheme() == "cargo" { + write!(f, "{}{}", url.host().unwrap(), url.path())?; } else { - try!(write!(f, "{}", url)); + write!(f, "{}", url)?; } - if url.path().unwrap().last().unwrap() != &self.name { + if url.path_segments().unwrap().next_back().unwrap() != &*self.name { printed_name = true; - try!(write!(f, "#{}", self.name)); + write!(f, "#{}", self.name)?; } } - None => { printed_name = true; try!(write!(f, "{}", self.name)) } - } - match self.version { - Some(ref v) => { - try!(write!(f, "{}{}", if printed_name {":"} else {"#"}, v)); + None => { + printed_name = true; + write!(f, "{}", self.name)? 
} - None => {} + } + if let Some(ref v) = self.version { + write!(f, "{}{}", if printed_name { ":" } else { "#" }, v)?; } Ok(()) } } +impl ser::Serialize for PackageIdSpec { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + self.to_string().serialize(s) + } +} + +impl<'de> de::Deserialize<'de> for PackageIdSpec { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + let string = String::deserialize(d)?; + PackageIdSpec::parse(&string).map_err(de::Error::custom) + } +} + #[cfg(test)] mod tests { - use core::{PackageId, SourceId}; - use super::{PackageIdSpec, url}; + use super::PackageIdSpec; + use crate::core::interning::InternedString; + use crate::core::{PackageId, SourceId}; + use crate::util::ToSemver; use url::Url; - use semver::Version; #[test] fn good_parsing() { @@ -174,66 +287,91 @@ mod tests { assert_eq!(parsed.to_string(), spec); } - ok("http://crates.io/foo#1.2.3", PackageIdSpec { - name: "foo".to_string(), - version: Some(Version::parse("1.2.3").unwrap()), - url: Some(url("http://crates.io/foo").unwrap()), - }); - ok("http://crates.io/foo#bar:1.2.3", PackageIdSpec { - name: "bar".to_string(), - version: Some(Version::parse("1.2.3").unwrap()), - url: Some(url("http://crates.io/foo").unwrap()), - }); - ok("crates.io/foo", PackageIdSpec { - name: "foo".to_string(), - version: None, - url: Some(url("cargo://crates.io/foo").unwrap()), - }); - ok("crates.io/foo#1.2.3", PackageIdSpec { - name: "foo".to_string(), - version: Some(Version::parse("1.2.3").unwrap()), - url: Some(url("cargo://crates.io/foo").unwrap()), - }); - ok("crates.io/foo#bar", PackageIdSpec { - name: "bar".to_string(), - version: None, - url: Some(url("cargo://crates.io/foo").unwrap()), - }); - ok("crates.io/foo#bar:1.2.3", PackageIdSpec { - name: "bar".to_string(), - version: Some(Version::parse("1.2.3").unwrap()), - url: Some(url("cargo://crates.io/foo").unwrap()), - }); - ok("foo", PackageIdSpec { - name: "foo".to_string(), - version: None, - url: None, - }); - ok("foo:1.2.3", PackageIdSpec { - name: "foo".to_string(), - version: Some(Version::parse("1.2.3").unwrap()), - url: None, - }); + ok( + "https://crates.io/foo#1.2.3", + PackageIdSpec { + name: InternedString::new("foo"), + version: Some("1.2.3".to_semver().unwrap()), + url: Some(Url::parse("https://crates.io/foo").unwrap()), + }, + ); + ok( + "https://crates.io/foo#bar:1.2.3", + PackageIdSpec { + name: InternedString::new("bar"), + version: Some("1.2.3".to_semver().unwrap()), + url: Some(Url::parse("https://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo", + PackageIdSpec { + name: InternedString::new("foo"), + version: None, + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo#1.2.3", + PackageIdSpec { + name: InternedString::new("foo"), + version: Some("1.2.3".to_semver().unwrap()), + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo#bar", + PackageIdSpec { + name: InternedString::new("bar"), + version: None, + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "crates.io/foo#bar:1.2.3", + PackageIdSpec { + name: InternedString::new("bar"), + version: Some("1.2.3".to_semver().unwrap()), + url: Some(Url::parse("cargo://crates.io/foo").unwrap()), + }, + ); + ok( + "foo", + PackageIdSpec { + name: InternedString::new("foo"), + version: None, + url: None, + }, + ); + ok( + "foo:1.2.3", + PackageIdSpec { + name: InternedString::new("foo"), + version: Some("1.2.3".to_semver().unwrap()), + url: None, + 
}, + ); } #[test] fn bad_parsing() { assert!(PackageIdSpec::parse("baz:").is_err()); + assert!(PackageIdSpec::parse("baz:*").is_err()); assert!(PackageIdSpec::parse("baz:1.0").is_err()); - assert!(PackageIdSpec::parse("http://baz:1.0").is_err()); - assert!(PackageIdSpec::parse("http://#baz:1.0").is_err()); + assert!(PackageIdSpec::parse("https://baz:1.0").is_err()); + assert!(PackageIdSpec::parse("https://#baz:1.0").is_err()); } #[test] fn matching() { - let url = Url::parse("http://example.com").unwrap(); - let sid = SourceId::for_registry(&url); - let foo = PackageId::new("foo", "1.2.3", &sid).unwrap(); - let bar = PackageId::new("bar", "1.2.3", &sid).unwrap(); - - assert!( PackageIdSpec::parse("foo").unwrap().matches(&foo)); - assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar)); - assert!( PackageIdSpec::parse("foo:1.2.3").unwrap().matches(&foo)); - assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(&foo)); + let url = Url::parse("https://example.com").unwrap(); + let sid = SourceId::for_registry(&url).unwrap(); + let foo = PackageId::new("foo", "1.2.3", sid).unwrap(); + let bar = PackageId::new("bar", "1.2.3", sid).unwrap(); + + assert!(PackageIdSpec::parse("foo").unwrap().matches(foo)); + assert!(!PackageIdSpec::parse("foo").unwrap().matches(bar)); + assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(foo)); + assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(foo)); } } diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs new file mode 100644 index 00000000000..c20b5800a80 --- /dev/null +++ b/src/cargo/core/profiles.rs @@ -0,0 +1,691 @@ +use std::collections::HashSet; +use std::{cmp, env, fmt, hash}; + +use serde::Deserialize; + +use crate::core::compiler::CompileMode; +use crate::core::interning::InternedString; +use crate::core::{Features, PackageId, PackageIdSpec, PackageSet, Shell}; +use crate::util::errors::CargoResultExt; +use crate::util::toml::{ProfilePackageSpec, StringOrBool, TomlProfile, TomlProfiles, U32OrBool}; +use crate::util::{closest_msg, CargoResult, Config}; + +/// Collection of all user profiles. +#[derive(Clone, Debug)] +pub struct Profiles { + dev: ProfileMaker, + release: ProfileMaker, + test: ProfileMaker, + bench: ProfileMaker, + doc: ProfileMaker, + /// Incremental compilation can be overridden globally via: + /// - `CARGO_INCREMENTAL` environment variable. + /// - `build.incremental` config value. 
+ incremental: Option, +} + +impl Profiles { + pub fn new( + profiles: Option<&TomlProfiles>, + config: &Config, + features: &Features, + warnings: &mut Vec, + ) -> CargoResult { + if let Some(profiles) = profiles { + profiles.validate(features, warnings)?; + } + + let config_profiles = config.profiles()?; + + let incremental = match env::var_os("CARGO_INCREMENTAL") { + Some(v) => Some(v == "1"), + None => config.get::>("build.incremental")?, + }; + + Ok(Profiles { + dev: ProfileMaker { + default: Profile::default_dev(), + toml: profiles.and_then(|p| p.dev.clone()), + config: config_profiles.dev.clone(), + }, + release: ProfileMaker { + default: Profile::default_release(), + toml: profiles.and_then(|p| p.release.clone()), + config: config_profiles.release.clone(), + }, + test: ProfileMaker { + default: Profile::default_test(), + toml: profiles.and_then(|p| p.test.clone()), + config: None, + }, + bench: ProfileMaker { + default: Profile::default_bench(), + toml: profiles.and_then(|p| p.bench.clone()), + config: None, + }, + doc: ProfileMaker { + default: Profile::default_doc(), + toml: profiles.and_then(|p| p.doc.clone()), + config: None, + }, + incremental, + }) + } + + /// Retrieves the profile for a target. + /// `is_member` is whether or not this package is a member of the + /// workspace. + pub fn get_profile( + &self, + pkg_id: PackageId, + is_member: bool, + unit_for: UnitFor, + mode: CompileMode, + release: bool, + ) -> Profile { + let maker = match mode { + CompileMode::Test | CompileMode::Bench => { + if release { + &self.bench + } else { + &self.test + } + } + CompileMode::Build + | CompileMode::Check { .. } + | CompileMode::Doctest + | CompileMode::RunCustomBuild => { + // Note: `RunCustomBuild` doesn't normally use this code path. + // `build_unit_profiles` normally ensures that it selects the + // ancestor's profile. However, `cargo clean -p` can hit this + // path. + if release { + &self.release + } else { + &self.dev + } + } + CompileMode::Doc { .. } => &self.doc, + }; + let mut profile = maker.get_profile(Some(pkg_id), is_member, unit_for); + // `panic` should not be set for tests/benches, or any of their + // dependencies. + if !unit_for.is_panic_abort_ok() || mode.is_any_test() { + profile.panic = PanicStrategy::Unwind; + } + + // Incremental can be globally overridden. + if let Some(v) = self.incremental { + profile.incremental = v; + } + // Only enable incremental compilation for sources the user can + // modify (aka path sources). For things that change infrequently, + // non-incremental builds yield better performance in the compiler + // itself (aka crates.io / git dependencies) + // + // (see also https://github.com/rust-lang/cargo/issues/3972) + if !pkg_id.source_id().is_path() { + profile.incremental = false; + } + profile + } + + /// The profile for *running* a `build.rs` script is only used for setting + /// a few environment variables. To ensure proper de-duplication of the + /// running `Unit`, this uses a stripped-down profile (so that unrelated + /// profile flags don't cause `build.rs` to needlessly run multiple + /// times). + pub fn get_profile_run_custom_build(&self, for_unit_profile: &Profile) -> Profile { + let mut result = Profile::default(); + result.debuginfo = for_unit_profile.debuginfo; + result.opt_level = for_unit_profile.opt_level; + result + } + + /// This returns a generic base profile. This is currently used for the + /// `[Finished]` line. It is not entirely accurate, since it doesn't + /// select for the package that was actually built. 
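// [Editor's note, illustration only] The (mode, release) => profile mapping
// implemented by `get_profile` above, restated as a hypothetical standalone
// helper; `CompileMode::Doc` always maps to the `doc` profile.
fn profile_name_for(is_test_or_bench: bool, release: bool) -> &'static str {
    match (is_test_or_bench, release) {
        (true, false) => "test",     // `cargo test`
        (true, true) => "bench",     // `cargo bench`
        (false, false) => "dev",     // `cargo build`
        (false, true) => "release",  // `cargo build --release`
    }
}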
+ pub fn base_profile(&self, release: bool) -> Profile { + if release { + self.release.get_profile(None, true, UnitFor::new_normal()) + } else { + self.dev.get_profile(None, true, UnitFor::new_normal()) + } + } + + /// Used to check for overrides for non-existing packages. + pub fn validate_packages( + &self, + shell: &mut Shell, + packages: &PackageSet<'_>, + ) -> CargoResult<()> { + self.dev.validate_packages(shell, packages)?; + self.release.validate_packages(shell, packages)?; + self.test.validate_packages(shell, packages)?; + self.bench.validate_packages(shell, packages)?; + self.doc.validate_packages(shell, packages)?; + Ok(()) + } +} + +/// An object used for handling the profile override hierarchy. +/// +/// The precedence of profiles are (first one wins): +/// - Profiles in `.cargo/config` files (using same order as below). +/// - [profile.dev.overrides.name] -- a named package. +/// - [profile.dev.overrides."*"] -- this cannot apply to workspace members. +/// - [profile.dev.build-override] -- this can only apply to `build.rs` scripts +/// and their dependencies. +/// - [profile.dev] +/// - Default (hard-coded) values. +#[derive(Debug, Clone)] +struct ProfileMaker { + /// The starting, hard-coded defaults for the profile. + default: Profile, + /// The profile from the `Cargo.toml` manifest. + toml: Option, + /// Profile loaded from `.cargo/config` files. + config: Option, +} + +impl ProfileMaker { + fn get_profile( + &self, + pkg_id: Option, + is_member: bool, + unit_for: UnitFor, + ) -> Profile { + let mut profile = self.default; + if let Some(ref toml) = self.toml { + merge_toml(pkg_id, is_member, unit_for, &mut profile, toml); + } + if let Some(ref toml) = self.config { + merge_toml(pkg_id, is_member, unit_for, &mut profile, toml); + } + profile + } + + fn validate_packages(&self, shell: &mut Shell, packages: &PackageSet<'_>) -> CargoResult<()> { + self.validate_packages_toml(shell, packages, &self.toml, true)?; + self.validate_packages_toml(shell, packages, &self.config, false)?; + Ok(()) + } + + fn validate_packages_toml( + &self, + shell: &mut Shell, + packages: &PackageSet<'_>, + toml: &Option, + warn_unmatched: bool, + ) -> CargoResult<()> { + let toml = match *toml { + Some(ref toml) => toml, + None => return Ok(()), + }; + let overrides = match toml.overrides { + Some(ref overrides) => overrides, + None => return Ok(()), + }; + // Verify that a package doesn't match multiple spec overrides. + let mut found = HashSet::new(); + for pkg_id in packages.package_ids() { + let matches: Vec<&PackageIdSpec> = overrides + .keys() + .filter_map(|key| match *key { + ProfilePackageSpec::All => None, + ProfilePackageSpec::Spec(ref spec) => { + if spec.matches(pkg_id) { + Some(spec) + } else { + None + } + } + }) + .collect(); + match matches.len() { + 0 => {} + 1 => { + found.insert(matches[0].clone()); + } + _ => { + let specs = matches + .iter() + .map(|spec| spec.to_string()) + .collect::>() + .join(", "); + failure::bail!( + "multiple profile overrides in profile `{}` match package `{}`\n\ + found profile override specs: {}", + self.default.name, + pkg_id, + specs + ); + } + } + } + + if !warn_unmatched { + return Ok(()); + } + // Verify every override matches at least one package. + let missing_specs = overrides.keys().filter_map(|key| { + if let ProfilePackageSpec::Spec(ref spec) = *key { + if !found.contains(spec) { + return Some(spec); + } + } + None + }); + for spec in missing_specs { + // See if there is an exact name match. 
+ let name_matches: Vec = packages + .package_ids() + .filter_map(|pkg_id| { + if pkg_id.name() == spec.name() { + Some(pkg_id.to_string()) + } else { + None + } + }) + .collect(); + if name_matches.is_empty() { + let suggestion = + closest_msg(&spec.name(), packages.package_ids(), |p| p.name().as_str()); + shell.warn(format!( + "profile override spec `{}` did not match any packages{}", + spec, suggestion + ))?; + } else { + shell.warn(format!( + "version or URL in profile override spec `{}` does not \ + match any of the packages: {}", + spec, + name_matches.join(", ") + ))?; + } + } + Ok(()) + } +} + +fn merge_toml( + pkg_id: Option, + is_member: bool, + unit_for: UnitFor, + profile: &mut Profile, + toml: &TomlProfile, +) { + merge_profile(profile, toml); + if unit_for.is_build() { + if let Some(ref build_override) = toml.build_override { + merge_profile(profile, build_override); + } + } + if let Some(ref overrides) = toml.overrides { + if !is_member { + if let Some(all) = overrides.get(&ProfilePackageSpec::All) { + merge_profile(profile, all); + } + } + if let Some(pkg_id) = pkg_id { + let mut matches = overrides + .iter() + .filter_map(|(key, spec_profile)| match *key { + ProfilePackageSpec::All => None, + ProfilePackageSpec::Spec(ref s) => { + if s.matches(pkg_id) { + Some(spec_profile) + } else { + None + } + } + }); + if let Some(spec_profile) = matches.next() { + merge_profile(profile, spec_profile); + // `validate_packages` should ensure that there are + // no additional matches. + assert!( + matches.next().is_none(), + "package `{}` matched multiple profile overrides", + pkg_id + ); + } + } + } +} + +fn merge_profile(profile: &mut Profile, toml: &TomlProfile) { + if let Some(ref opt_level) = toml.opt_level { + profile.opt_level = InternedString::new(&opt_level.0); + } + match toml.lto { + Some(StringOrBool::Bool(b)) => profile.lto = Lto::Bool(b), + Some(StringOrBool::String(ref n)) => profile.lto = Lto::Named(InternedString::new(n)), + None => {} + } + if toml.codegen_units.is_some() { + profile.codegen_units = toml.codegen_units; + } + match toml.debug { + Some(U32OrBool::U32(debug)) => profile.debuginfo = Some(debug), + Some(U32OrBool::Bool(true)) => profile.debuginfo = Some(2), + Some(U32OrBool::Bool(false)) => profile.debuginfo = None, + None => {} + } + if let Some(debug_assertions) = toml.debug_assertions { + profile.debug_assertions = debug_assertions; + } + if let Some(rpath) = toml.rpath { + profile.rpath = rpath; + } + if let Some(panic) = &toml.panic { + profile.panic = match panic.as_str() { + "unwind" => PanicStrategy::Unwind, + "abort" => PanicStrategy::Abort, + // This should be validated in TomlProfile::validate + _ => panic!("Unexpected panic setting `{}`", panic), + }; + } + if let Some(overflow_checks) = toml.overflow_checks { + profile.overflow_checks = overflow_checks; + } + if let Some(incremental) = toml.incremental { + profile.incremental = incremental; + } +} + +/// Profile settings used to determine which compiler flags to use for a +/// target. +#[derive(Clone, Copy, Eq, PartialOrd, Ord)] +pub struct Profile { + pub name: &'static str, + pub opt_level: InternedString, + pub lto: Lto, + // `None` means use rustc default. 
+ pub codegen_units: Option, + pub debuginfo: Option, + pub debug_assertions: bool, + pub overflow_checks: bool, + pub rpath: bool, + pub incremental: bool, + pub panic: PanicStrategy, +} + +impl Default for Profile { + fn default() -> Profile { + Profile { + name: "", + opt_level: InternedString::new("0"), + lto: Lto::Bool(false), + codegen_units: None, + debuginfo: None, + debug_assertions: false, + overflow_checks: false, + rpath: false, + incremental: false, + panic: PanicStrategy::Unwind, + } + } +} + +compact_debug! { + impl fmt::Debug for Profile { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let (default, default_name) = match self.name { + "dev" => (Profile::default_dev(), "default_dev()"), + "release" => (Profile::default_release(), "default_release()"), + "test" => (Profile::default_test(), "default_test()"), + "bench" => (Profile::default_bench(), "default_bench()"), + "doc" => (Profile::default_doc(), "default_doc()"), + _ => (Profile::default(), "default()"), + }; + [debug_the_fields( + name + opt_level + lto + codegen_units + debuginfo + debug_assertions + overflow_checks + rpath + incremental + panic + )] + } + } +} + +impl fmt::Display for Profile { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Profile({})", self.name) + } +} + +impl hash::Hash for Profile { + fn hash(&self, state: &mut H) + where + H: hash::Hasher, + { + self.comparable().hash(state); + } +} + +impl cmp::PartialEq for Profile { + fn eq(&self, other: &Self) -> bool { + self.comparable() == other.comparable() + } +} + +impl Profile { + fn default_dev() -> Profile { + Profile { + name: "dev", + debuginfo: Some(2), + debug_assertions: true, + overflow_checks: true, + incremental: true, + ..Profile::default() + } + } + + fn default_release() -> Profile { + Profile { + name: "release", + opt_level: InternedString::new("3"), + ..Profile::default() + } + } + + fn default_test() -> Profile { + Profile { + name: "test", + ..Profile::default_dev() + } + } + + fn default_bench() -> Profile { + Profile { + name: "bench", + ..Profile::default_release() + } + } + + fn default_doc() -> Profile { + Profile { + name: "doc", + ..Profile::default_dev() + } + } + + /// Compares all fields except `name`, which doesn't affect compilation. + /// This is necessary for `Unit` deduplication for things like "test" and + /// "dev" which are essentially the same. + fn comparable( + &self, + ) -> ( + InternedString, + Lto, + Option, + Option, + bool, + bool, + bool, + bool, + PanicStrategy, + ) { + ( + self.opt_level, + self.lto, + self.codegen_units, + self.debuginfo, + self.debug_assertions, + self.overflow_checks, + self.rpath, + self.incremental, + self.panic, + ) + } +} + +/// The link-time-optimization setting. +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] +pub enum Lto { + /// False = no LTO + /// True = "Fat" LTO + Bool(bool), + /// Named LTO settings like "thin". + Named(InternedString), +} + +/// The `panic` setting. +#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] +pub enum PanicStrategy { + Unwind, + Abort, +} + +impl fmt::Display for PanicStrategy { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + PanicStrategy::Unwind => "unwind", + PanicStrategy::Abort => "abort", + } + .fmt(f) + } +} + +/// Flags used in creating `Unit`s to indicate the purpose for the target, and +/// to ensure the target's dependencies have the correct settings. 
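// [Editor's sketch, not part of the patch] How the `UnitFor` flags defined
// below combine, using only the methods from this file:
fn unit_for_demo() {
    let normal = UnitFor::new_normal();
    assert!(normal.is_panic_abort_ok() && !normal.is_build());
    // `with_for_host(true)` is sticky: build scripts and proc macros (and
    // everything they depend on) lose the right to set `panic=abort`.
    let host = normal.with_for_host(true);
    assert!(host.is_build() && !host.is_panic_abort_ok());
}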
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] +pub struct UnitFor { + /// A target for `build.rs` or any of its dependencies, or a proc-macro or + /// any of its dependencies. This enables `build-override` profiles for + /// these targets. + build: bool, + /// This is true if it is *allowed* to set the `panic=abort` flag. Currently + /// this is false for test/bench targets and all their dependencies, and + /// "for_host" units such as proc macro and custom build scripts and their + /// dependencies. + panic_abort_ok: bool, +} + +impl UnitFor { + /// A unit for a normal target/dependency (i.e., not custom build, + /// proc macro/plugin, or test/bench). + pub fn new_normal() -> UnitFor { + UnitFor { + build: false, + panic_abort_ok: true, + } + } + + /// A unit for a custom build script or its dependencies. + pub fn new_build() -> UnitFor { + UnitFor { + build: true, + panic_abort_ok: false, + } + } + + /// A unit for a proc macro or compiler plugin or their dependencies. + pub fn new_compiler() -> UnitFor { + UnitFor { + build: false, + panic_abort_ok: false, + } + } + + /// A unit for a test/bench target or their dependencies. + pub fn new_test() -> UnitFor { + UnitFor { + build: false, + panic_abort_ok: false, + } + } + + /// Creates a variant based on `for_host` setting. + /// + /// When `for_host` is true, this clears `panic_abort_ok` in a sticky fashion so + /// that all its dependencies also have `panic_abort_ok=false`. + pub fn with_for_host(self, for_host: bool) -> UnitFor { + UnitFor { + build: self.build || for_host, + panic_abort_ok: self.panic_abort_ok && !for_host, + } + } + + /// Returns `true` if this unit is for a custom build script or one of its + /// dependencies. + pub fn is_build(self) -> bool { + self.build + } + + /// Returns `true` if this unit is allowed to set the `panic` compiler flag. + pub fn is_panic_abort_ok(self) -> bool { + self.panic_abort_ok + } + + /// All possible values, used by `clean`. + pub fn all_values() -> &'static [UnitFor] { + static ALL: [UnitFor; 3] = [ + UnitFor { + build: false, + panic_abort_ok: true, + }, + UnitFor { + build: true, + panic_abort_ok: false, + }, + UnitFor { + build: false, + panic_abort_ok: false, + }, + ]; + &ALL + } +} + +/// Profiles loaded from `.cargo/config` files. +#[derive(Clone, Debug, Deserialize, Default)] +pub struct ConfigProfiles { + dev: Option, + release: Option, +} + +impl ConfigProfiles { + pub fn validate(&self, features: &Features, warnings: &mut Vec) -> CargoResult<()> { + if let Some(ref profile) = self.dev { + profile + .validate("dev", features, warnings) + .chain_err(|| failure::format_err!("config profile `profile.dev` is not valid"))?; + } + if let Some(ref profile) = self.release { + profile + .validate("release", features, warnings) + .chain_err(|| { + failure::format_err!("config profile `profile.release` is not valid") + })?; + } + Ok(()) + } +} diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 273bb5ed56b..1d1839a0cda 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -1,29 +1,35 @@ -use std::collections::HashSet; -use std::collections::hash_map::HashMap; +use std::collections::{HashMap, HashSet}; -use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package}; -use util::{CargoResult, ChainError, Config, human, profile}; +use log::{debug, trace}; +use semver::VersionReq; +use url::Url; -/// Source of informations about a group of packages. 
+use crate::core::PackageSet; +use crate::core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary}; +use crate::sources::config::SourceConfigMap; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::{profile, Config}; + +/// Source of information about a group of packages. /// /// See also `core::Source`. pub trait Registry { /// Attempt to find the packages that match a dependency request. - fn query(&mut self, name: &Dependency) -> CargoResult>; -} - -impl Registry for Vec { - fn query(&mut self, dep: &Dependency) -> CargoResult> { - Ok(self.iter().filter(|summary| dep.matches(*summary)) - .map(|summary| summary.clone()).collect()) + fn query( + &mut self, + dep: &Dependency, + f: &mut dyn FnMut(Summary), + fuzzy: bool, + ) -> CargoResult<()>; + + fn query_vec(&mut self, dep: &Dependency, fuzzy: bool) -> CargoResult> { + let mut ret = Vec::new(); + self.query(dep, &mut |s| ret.push(s), fuzzy)?; + Ok(ret) } -} -impl Registry for Vec { - fn query(&mut self, dep: &Dependency) -> CargoResult> { - Ok(self.iter().filter(|pkg| dep.matches(pkg.summary())) - .map(|pkg| pkg.summary().clone()).collect()) - } + fn describe_source(&self, source: SourceId) -> String; + fn is_replaced(&self, source: SourceId) -> bool; } /// This structure represents a registry of known packages. It internally @@ -32,16 +38,16 @@ impl Registry for Vec { /// /// The resolution phase of Cargo uses this to drive knowledge about new /// packages as well as querying for lists of new packages. It is here that -/// sources are updated and (e.g. network operations) as well as overrides are +/// sources are updated (e.g., network operations) and overrides are /// handled. /// /// The general idea behind this registry is that it is centered around the -/// `SourceMap` structure contained within which is a mapping of a `SourceId` to +/// `SourceMap` structure, contained within which is a mapping of a `SourceId` to /// a `Source`. Each `Source` in the map has been updated (using network /// operations if necessary) and is ready to be queried for packages. pub struct PackageRegistry<'cfg> { - sources: SourceMap<'cfg>, config: &'cfg Config, + sources: SourceMap<'cfg>, // A list of sources which are considered "overrides" which take precedent // when querying for packages. @@ -64,9 +70,17 @@ pub struct PackageRegistry<'cfg> { // what exactly the key is. 
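// [Editor's note, illustration only] `query_vec` above is the convenience
// form most callers use; the callback form lets a source stream summaries
// without allocating an intermediate Vec:
//
//     let hits = registry.query_vec(&dep, /* fuzzy */ false)?;
//     registry.query(&dep, &mut |summary| candidates.push(summary), false)?;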
source_ids: HashMap, - locked: HashMap)>>>, + locked: LockedMap, + yanked_whitelist: HashSet, + source_config: SourceConfigMap<'cfg>, + + patches: HashMap>, + patches_locked: bool, + patches_available: HashMap>, } +type LockedMap = HashMap)>>>; + #[derive(PartialEq, Eq, Clone, Copy)] enum Kind { Override, @@ -75,57 +89,42 @@ enum Kind { } impl<'cfg> PackageRegistry<'cfg> { - pub fn new(config: &'cfg Config) -> PackageRegistry<'cfg> { - PackageRegistry { + pub fn new(config: &'cfg Config) -> CargoResult> { + let source_config = SourceConfigMap::new(config)?; + Ok(PackageRegistry { + config, sources: SourceMap::new(), source_ids: HashMap::new(), - overrides: vec!(), - config: config, + overrides: Vec::new(), + source_config, locked: HashMap::new(), - } + yanked_whitelist: HashSet::new(), + patches: HashMap::new(), + patches_locked: false, + patches_available: HashMap::new(), + }) } - pub fn get(&mut self, package_ids: &[PackageId]) -> CargoResult> { + pub fn get(self, package_ids: &[PackageId]) -> CargoResult> { trace!("getting packages; sources={}", self.sources.len()); - - // TODO: Only call source with package ID if the package came from the - // source - let mut ret = Vec::new(); - - for (_, source) in self.sources.sources_mut() { - try!(source.download(package_ids)); - let packages = try!(source.get(package_ids)); - - ret.extend(packages.into_iter()); - } - - // TODO: Return earlier if fail - assert!(package_ids.len() == ret.len(), - "could not get packages from registry; ids={:?}; ret={:?}", - package_ids, ret); - - Ok(ret) - } - - pub fn move_sources(self) -> SourceMap<'cfg> { - self.sources + PackageSet::new(package_ids, self.sources, self.config) } - fn ensure_loaded(&mut self, namespace: &SourceId) -> CargoResult<()> { - match self.source_ids.get(namespace) { + fn ensure_loaded(&mut self, namespace: SourceId, kind: Kind) -> CargoResult<()> { + match self.source_ids.get(&namespace) { // We've previously loaded this source, and we've already locked it, // so we're not allowed to change it even if `namespace` has a // slightly different precise version listed. Some(&(_, Kind::Locked)) => { debug!("load/locked {}", namespace); - return Ok(()) + return Ok(()); } // If the previous source was not a precise source, then we can be // sure that it's already been updated if we've already loaded it. 
Some(&(ref previous, _)) if previous.precise().is_none() => { debug!("load/precise {}", namespace); - return Ok(()) + return Ok(()); } // If the previous source has the same precise version as we do, @@ -134,7 +133,7 @@ impl<'cfg> PackageRegistry<'cfg> { Some(&(ref previous, _)) => { if previous.precise() == namespace.precise() { debug!("load/match {}", namespace); - return Ok(()) + return Ok(()); } debug!("load/mismatch {}", namespace); } @@ -143,228 +142,525 @@ impl<'cfg> PackageRegistry<'cfg> { } } - try!(self.load(namespace, Kind::Normal)); + self.load(namespace, kind)?; Ok(()) } - pub fn preload(&mut self, id: &SourceId, source: Box) { - self.sources.insert(id, source); - self.source_ids.insert(id.clone(), (id.clone(), Kind::Locked)); - } - - pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> { - for id in ids.iter() { - try!(self.load(id, Kind::Locked)); + pub fn add_sources(&mut self, ids: impl IntoIterator) -> CargoResult<()> { + for id in ids { + self.ensure_loaded(id, Kind::Locked)?; } Ok(()) } - pub fn add_overrides(&mut self, ids: Vec) -> CargoResult<()> { - for id in ids.iter() { - try!(self.load(id, Kind::Override)); + pub fn add_preloaded(&mut self, source: Box) { + self.add_source(source, Kind::Locked); + } + + fn add_source(&mut self, source: Box, kind: Kind) { + let id = source.source_id(); + self.sources.insert(source); + self.source_ids.insert(id, (id, kind)); + } + + pub fn add_override(&mut self, source: Box) { + self.overrides.push(source.source_id()); + self.add_source(source, Kind::Override); + } + + pub fn add_to_yanked_whitelist(&mut self, iter: impl Iterator) { + let pkgs = iter.collect::>(); + for (_, source) in self.sources.sources_mut() { + source.add_to_yanked_whitelist(&pkgs); } - Ok(()) + self.yanked_whitelist.extend(pkgs); } pub fn register_lock(&mut self, id: PackageId, deps: Vec) { - let sub_map = self.locked.entry(id.source_id().clone()) - .or_insert(HashMap::new()); - let sub_vec = sub_map.entry(id.name().to_string()) - .or_insert(Vec::new()); + trace!("register_lock: {}", id); + for dep in deps.iter() { + trace!("\t-> {}", dep); + } + let sub_map = self + .locked + .entry(id.source_id()) + .or_insert_with(HashMap::new); + let sub_vec = sub_map + .entry(id.name().to_string()) + .or_insert_with(Vec::new); sub_vec.push((id, deps)); } - fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { - (|| { - let mut source = source_id.load(self.config); + /// Insert a `[patch]` section into this registry. + /// + /// This method will insert a `[patch]` section for the `url` specified, + /// with the given list of dependencies. The `url` specified is the URL of + /// the source to patch (for example this is `crates-io` in the manifest). + /// The `deps` is an array of all the entries in the `[patch]` section of + /// the manifest. + /// + /// Here the `deps` will be resolved to a precise version and stored + /// internally for future calls to `query` below. It's expected that `deps` + /// have had `lock_to` call already, if applicable. (e.g., if a lock file was + /// already present). + /// + /// Note that the patch list specified here *will not* be available to + /// `query` until `lock_patches` is called below, which should be called + /// once all patches have been added. + pub fn patch(&mut self, url: &Url, deps: &[Dependency]) -> CargoResult<()> { + // First up we need to actually resolve each `deps` specification to + // precisely one summary. 
We're not using the `query` method below as it + // internally uses maps we're building up as part of this method + // (`patches_available` and `patches). Instead we're going straight to + // the source to load information from it. + // + // Remember that each dependency listed in `[patch]` has to resolve to + // precisely one package, so that's why we're just creating a flat list + // of summaries which should be the same length as `deps` above. + let unlocked_summaries = deps + .iter() + .map(|dep| { + debug!( + "registering a patch for `{}` with `{}`", + url, + dep.package_name() + ); + + // Go straight to the source for resolving `dep`. Load it as we + // normally would and then ask it directly for the list of summaries + // corresponding to this `dep`. + self.ensure_loaded(dep.source_id(), Kind::Normal) + .chain_err(|| { + failure::format_err!( + "failed to load source for a dependency \ + on `{}`", + dep.package_name() + ) + })?; + + let mut summaries = self + .sources + .get_mut(dep.source_id()) + .expect("loaded source not present") + .query_vec(dep)? + .into_iter(); + + let summary = match summaries.next() { + Some(summary) => summary, + None => failure::bail!( + "patch for `{}` in `{}` did not resolve to any crates. If this is \ + unexpected, you may wish to consult: \ + https://github.com/rust-lang/cargo/issues/4678", + dep.package_name(), + url + ), + }; + if summaries.next().is_some() { + failure::bail!( + "patch for `{}` in `{}` resolved to more than one candidate", + dep.package_name(), + url + ) + } + if summary.package_id().source_id().url() == url { + failure::bail!( + "patch for `{}` in `{}` points to the same source, but \ + patches must point to different sources", + dep.package_name(), + url + ); + } + Ok(summary) + }) + .collect::>>() + .chain_err(|| failure::format_err!("failed to resolve patches for `{}`", url))?; + + // Note that we do not use `lock` here to lock summaries! That step + // happens later once `lock_patches` is invoked. In the meantime though + // we want to fill in the `patches_available` map (later used in the + // `lock` method) and otherwise store the unlocked summaries in + // `patches` to get locked in a future call to `lock_patches`. + let ids = unlocked_summaries.iter().map(|s| s.package_id()).collect(); + self.patches_available.insert(url.clone(), ids); + self.patches.insert(url.clone(), unlocked_summaries); - // Ensure the source has fetched all necessary remote data. - let p = profile::start(format!("updating: {}", source_id)); - try!(source.update()); - drop(p); + Ok(()) + } - if kind == Kind::Override { - self.overrides.push(source_id.clone()); + /// Lock all patch summaries added via `patch`, making them available to + /// resolution via `query`. + /// + /// This function will internally `lock` each summary added via `patch` + /// above now that the full set of `patch` packages are known. This'll allow + /// us to correctly resolve overridden dependencies between patches + /// hopefully! 
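// [Editor's sketch, not part of the patch] The expected call order for the
// patch APIs in this file, with error handling elided; `registry`, `url`, and
// `patch_deps` are hypothetical locals:
fn patch_flow_demo(
    registry: &mut PackageRegistry<'_>,
    url: &Url,
    patch_deps: &[Dependency],
) -> CargoResult<()> {
    registry.patch(url, patch_deps)?; // once per `[patch]` table in the manifest
    registry.lock_patches(); // must come after *all* patches are registered
    Ok(())
}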
+ pub fn lock_patches(&mut self) { + assert!(!self.patches_locked); + for summaries in self.patches.values_mut() { + for summary in summaries { + *summary = lock(&self.locked, &self.patches_available, summary.clone()); } + } + self.patches_locked = true; + } - // Save off the source - self.sources.insert(source_id, source); - self.source_ids.insert(source_id.clone(), (source_id.clone(), kind)); + pub fn patches(&self) -> &HashMap> { + &self.patches + } - Ok(()) - }).chain_error(|| human(format!("Unable to update {}", source_id))) + fn load(&mut self, source_id: SourceId, kind: Kind) -> CargoResult<()> { + (|| { + debug!("loading source {}", source_id); + let source = self.source_config.load(source_id, &self.yanked_whitelist)?; + assert_eq!(source.source_id(), source_id); + + if kind == Kind::Override { + self.overrides.push(source_id); + } + self.add_source(source, kind); + + // Ensure the source has fetched all necessary remote data. + let _p = profile::start(format!("updating: {}", source_id)); + self.sources.get_mut(source_id).unwrap().update() + })() + .chain_err(|| failure::format_err!("Unable to update {}", source_id))?; + Ok(()) } - fn query_overrides(&mut self, dep: &Dependency) - -> CargoResult> { - let mut seen = HashSet::new(); - let mut ret = Vec::new(); - for s in self.overrides.iter() { + fn query_overrides(&mut self, dep: &Dependency) -> CargoResult> { + for &s in self.overrides.iter() { let src = self.sources.get_mut(s).unwrap(); - let dep = Dependency::new_override(dep.name(), s); - ret.extend(try!(src.query(&dep)).into_iter().filter(|s| { - seen.insert(s.name().to_string()) - })); + let dep = Dependency::new_override(&*dep.package_name(), s); + let mut results = src.query_vec(&dep)?; + if !results.is_empty() { + return Ok(Some(results.remove(0))); + } } - Ok(ret) + Ok(None) } - // This function is used to transform a summary to another locked summary if - // possible. This is where the concept of a lockfile comes into play. - // - // If a summary points at a package id which was previously locked, then we - // override the summary's id itself as well as all dependencies to be - // rewritten to the locked versions. This will transform the summary's - // source to a precise source (listed in the locked version) as well as - // transforming all of the dependencies from range requirements on imprecise - // sources to exact requirements on precise sources. - // - // If a summary does not point at a package id which was previously locked, - // we still want to avoid updating as many dependencies as possible to keep - // the graph stable. In this case we map all of the summary's dependencies - // to be rewritten to a locked version wherever possible. If we're unable to - // map a dependency though, we just pass it on through. - fn lock(&self, summary: Summary) -> Summary { - let pair = self.locked.get(summary.source_id()).and_then(|map| { - map.get(summary.name()) - }).and_then(|vec| { - vec.iter().find(|&&(ref id, _)| id == summary.package_id()) - }); - - // Lock the summary's id if possible - let summary = match pair { - Some(&(ref precise, _)) => summary.override_id(precise.clone()), - None => summary, - }; - summary.map_dependencies(|dep| { - match pair { - // If we've got a known set of overrides for this summary, then - // one of a few cases can arise: - // - // 1. We have a lock entry for this dependency from the same - // source as its listed as coming from. In this case we make - // sure to lock to precisely the given package id. - // - // 2. 
We have a lock entry for this dependency, but it's from a - // different source than what's listed, or the version - // requirement has changed. In this case we must discard the - // locked version because the dependency needs to be - // re-resolved. - // - // 3. We don't have a lock entry for this dependency, in which - // case it was likely an optional dependency which wasn't - // included previously so we just pass it through anyway. - Some(&(_, ref deps)) => { - match deps.iter().find(|d| d.name() == dep.name()) { - Some(lock) => { - if dep.matches_id(lock) { - dep.lock_to(lock) - } else { - dep - } - } - None => dep, - } - } + /// This function is used to transform a summary to another locked summary + /// if possible. This is where the concept of a lock file comes into play. + /// + /// If a summary points at a package ID which was previously locked, then we + /// override the summary's ID itself, as well as all dependencies, to be + /// rewritten to the locked versions. This will transform the summary's + /// source to a precise source (listed in the locked version) as well as + /// transforming all of the dependencies from range requirements on + /// imprecise sources to exact requirements on precise sources. + /// + /// If a summary does not point at a package ID which was previously locked, + /// or if any dependencies were added and don't have a previously listed + /// version, we still want to avoid updating as many dependencies as + /// possible to keep the graph stable. In this case we map all of the + /// summary's dependencies to be rewritten to a locked version wherever + /// possible. If we're unable to map a dependency though, we just pass it on + /// through. + pub fn lock(&self, summary: Summary) -> Summary { + assert!(self.patches_locked); + lock(&self.locked, &self.patches_available, summary) + } - // If this summary did not have a locked version, then we query - // all known locked packages to see if they match this - // dependency. If anything does then we lock it to that and move - // on. - None => { - let v = self.locked.get(dep.source_id()).and_then(|map| { - map.get(dep.name()) - }).and_then(|vec| { - vec.iter().find(|&&(ref id, _)| dep.matches_id(id)) - }); - match v { - Some(&(ref id, _)) => dep.lock_to(id), - None => dep - } - } + fn warn_bad_override( + &self, + override_summary: &Summary, + real_summary: &Summary, + ) -> CargoResult<()> { + let mut real_deps = real_summary.dependencies().iter().collect::>(); + + let boilerplate = "\ +This is currently allowed but is known to produce buggy behavior with spurious +recompiles and changes to the crate graph. Path overrides unfortunately were +never intended to support this feature, so for now this message is just a +warning. In the future, however, this message will become a hard error. + +To change the dependency graph via an override it's recommended to use the +`[replace]` feature of Cargo instead of the path override feature. This is +documented online at the url below for more information. 
+ +https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies +"; + + for dep in override_summary.dependencies() { + if let Some(i) = real_deps.iter().position(|d| dep == *d) { + real_deps.remove(i); + continue; } - }) + let msg = format!( + "path override for crate `{}` has altered the original list of\n\ + dependencies; the dependency on `{}` was either added or\n\ + modified to not match the previously resolved version\n\n\ + {}", + override_summary.package_id().name(), + dep.package_name(), + boilerplate + ); + self.source_config.config().shell().warn(&msg)?; + return Ok(()); + } + + if let Some(dep) = real_deps.get(0) { + let msg = format!( + "path override for crate `{}` has altered the original list of\n\ + dependencies; the dependency on `{}` was removed\n\n\ + {}", + override_summary.package_id().name(), + dep.package_name(), + boilerplate + ); + self.source_config.config().shell().warn(&msg)?; + return Ok(()); + } + + Ok(()) } } impl<'cfg> Registry for PackageRegistry<'cfg> { - fn query(&mut self, dep: &Dependency) -> CargoResult> { - let overrides = try!(self.query_overrides(dep)); - - let ret = if overrides.len() == 0 { - // Ensure the requested source_id is loaded - try!(self.ensure_loaded(dep.source_id())); - let mut ret = Vec::new(); - for (id, src) in self.sources.sources_mut() { - if id == dep.source_id() { - ret.extend(try!(src.query(dep)).into_iter()); - } + fn query( + &mut self, + dep: &Dependency, + f: &mut dyn FnMut(Summary), + fuzzy: bool, + ) -> CargoResult<()> { + assert!(self.patches_locked); + let (override_summary, n, to_warn) = { + // Look for an override and get ready to query the real source. + let override_summary = self.query_overrides(dep)?; + + // Next up on our list of candidates is to check the `[patch]` + // section of the manifest. Here we look through all patches + // relevant to the source that `dep` points to, and then we match + // name/version. Note that we don't use `dep.matches(..)` because + // the patches, by definition, come from a different source. + // This means that `dep.matches(..)` will always return false, when + // what we really care about is the name/version match. + let mut patches = Vec::::new(); + if let Some(extra) = self.patches.get(dep.source_id().url()) { + patches.extend( + extra + .iter() + .filter(|s| dep.matches_ignoring_source(s.package_id())) + .cloned(), + ); } - ret - } else { - overrides - }; - // post-process all returned summaries to ensure that we lock all - // relevant summaries to the right versions and sources - Ok(ret.into_iter().map(|summary| self.lock(summary)).collect()) - } -} + // A crucial feature of the `[patch]` feature is that we *don't* + // query the actual registry if we have a "locked" dependency. A + // locked dep basically just means a version constraint of `=a.b.c`, + // and because patches take priority over the actual source then if + // we have a candidate we're done. 
+ if patches.len() == 1 && dep.is_locked() { + let patch = patches.remove(0); + match override_summary { + Some(summary) => (summary, 1, Some(patch)), + None => { + f(patch); + return Ok(()); + } + } + } else { + if !patches.is_empty() { + debug!( + "found {} patches with an unlocked dep on `{}` at {} \ + with `{}`, \ + looking at sources", + patches.len(), + dep.package_name(), + dep.source_id(), + dep.version_req() + ); + } -#[cfg(test)] -pub mod test { - use core::{Summary, Registry, Dependency}; - use util::{CargoResult}; + // Ensure the requested source_id is loaded + self.ensure_loaded(dep.source_id(), Kind::Normal) + .chain_err(|| { + failure::format_err!( + "failed to load source for a dependency \ + on `{}`", + dep.package_name() + ) + })?; + + let source = self.sources.get_mut(dep.source_id()); + match (override_summary, source) { + (Some(_), None) => failure::bail!("override found but no real ones"), + (None, None) => return Ok(()), + + // If we don't have an override then we just ship + // everything upstairs after locking the summary + (None, Some(source)) => { + for patch in patches.iter() { + f(patch.clone()); + } - pub struct RegistryBuilder { - summaries: Vec, - overrides: Vec - } + // Our sources shouldn't ever come back to us with two + // summaries that have the same version. We could, + // however, have an `[patch]` section which is in use + // to override a version in the registry. This means + // that if our `summary` in this loop has the same + // version as something in `patches` that we've + // already selected, then we skip this `summary`. + let locked = &self.locked; + let all_patches = &self.patches_available; + let callback = &mut |summary: Summary| { + for patch in patches.iter() { + let patch = patch.package_id().version(); + if summary.package_id().version() == patch { + return; + } + } + f(lock(locked, all_patches, summary)) + }; + return if fuzzy { + source.fuzzy_query(dep, callback) + } else { + source.query(dep, callback) + }; + } - impl RegistryBuilder { - pub fn new() -> RegistryBuilder { - RegistryBuilder { summaries: vec!(), overrides: vec!() } - } + // If we have an override summary then we query the source + // to sanity check its results. We don't actually use any of + // the summaries it gives us though. 
+ (Some(override_summary), Some(source)) => { + if !patches.is_empty() { + failure::bail!("found patches and a path override") + } + let mut n = 0; + let mut to_warn = None; + { + let callback = &mut |summary| { + n += 1; + to_warn = Some(summary); + }; + if fuzzy { + source.fuzzy_query(dep, callback)?; + } else { + source.query(dep, callback)?; + } + } + (override_summary, n, to_warn) + } + } + } + }; - pub fn summary(mut self, summary: Summary) -> RegistryBuilder { - self.summaries.push(summary); - self + if n > 1 { + failure::bail!("found an override with a non-locked list"); + } else if let Some(summary) = to_warn { + self.warn_bad_override(&override_summary, &summary)?; } + f(self.lock(override_summary)); + Ok(()) + } - pub fn summaries(mut self, summaries: Vec) -> RegistryBuilder { - self.summaries.extend(summaries.into_iter()); - self + fn describe_source(&self, id: SourceId) -> String { + match self.sources.get(id) { + Some(src) => src.describe(), + None => id.to_string(), } + } - pub fn add_override(mut self, summary: Summary) -> RegistryBuilder { - self.overrides.push(summary); - self + fn is_replaced(&self, id: SourceId) -> bool { + match self.sources.get(id) { + Some(src) => src.is_replaced(), + None => false, } + } +} - pub fn overrides(mut self, summaries: Vec) -> RegistryBuilder { - self.overrides.extend(summaries.into_iter()); - self +fn lock(locked: &LockedMap, patches: &HashMap>, summary: Summary) -> Summary { + let pair = locked + .get(&summary.source_id()) + .and_then(|map| map.get(&*summary.name())) + .and_then(|vec| vec.iter().find(|&&(id, _)| id == summary.package_id())); + + trace!("locking summary of {}", summary.package_id()); + + // Lock the summary's ID if possible + let summary = match pair { + Some(&(ref precise, _)) => summary.override_id(precise.clone()), + None => summary, + }; + summary.map_dependencies(|dep| { + trace!( + "\t{}/{}/{}", + dep.package_name(), + dep.version_req(), + dep.source_id() + ); + + // If we've got a known set of overrides for this summary, then + // one of a few cases can arise: + // + // 1. We have a lock entry for this dependency from the same + // source as it's listed as coming from. In this case we make + // sure to lock to precisely the given package ID. + // + // 2. We have a lock entry for this dependency, but it's from a + // different source than what's listed, or the version + // requirement has changed. In this case we must discard the + // locked version because the dependency needs to be + // re-resolved. + // + // 3. We don't have a lock entry for this dependency, in which + // case it was likely an optional dependency which wasn't + // included previously so we just pass it through anyway. + // + // Cases 1/2 are handled by `matches_id` and case 3 is handled by + // falling through to the logic below. + if let Some(&(_, ref locked_deps)) = pair { + let locked = locked_deps.iter().find(|&&id| dep.matches_id(id)); + if let Some(&locked) = locked { + trace!("\tfirst hit on {}", locked); + let mut dep = dep; + dep.lock_to(locked); + return dep; + } } - fn query_overrides(&self, dep: &Dependency) -> Vec { - self.overrides.iter() - .filter(|s| s.name() == dep.name()) - .map(|s| s.clone()) - .collect() + // If this dependency did not have a locked version, then we query + // all known locked packages to see if they match this dependency. + // If anything does then we lock it to that and move on. 
+        let v = locked
+            .get(&dep.source_id())
+            .and_then(|map| map.get(&*dep.package_name()))
+            .and_then(|vec| vec.iter().find(|&&(id, _)| dep.matches_id(id)));
+        if let Some(&(id, _)) = v {
+            trace!("\tsecond hit on {}", id);
+            let mut dep = dep;
+            dep.lock_to(id);
+            return dep;
         }
 
-    impl Registry for RegistryBuilder {
-        fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
-            debug!("querying; dep={:?}", dep);
-
-            let overrides = self.query_overrides(dep);
-
-            if overrides.is_empty() {
-                self.summaries.query(dep)
-            } else {
-                Ok(overrides)
+        // Finally we check to see if any registered patches correspond to
+        // this dependency.
+        let v = patches.get(dep.source_id().url()).map(|vec| {
+            let dep2 = dep.clone();
+            let mut iter = vec
+                .iter()
+                .filter(move |&&p| dep2.matches_ignoring_source(p));
+            (iter.next(), iter)
+        });
+        if let Some((Some(patch_id), mut remaining)) = v {
+            assert!(remaining.next().is_none());
+            let patch_source = patch_id.source_id();
+            let patch_locked = locked
+                .get(&patch_source)
+                .and_then(|m| m.get(&*patch_id.name()))
+                .map(|list| list.iter().any(|&(ref id, _)| id == patch_id))
+                .unwrap_or(false);
+
+            if patch_locked {
+                trace!("\tthird hit on {}", patch_id);
+                let req = VersionReq::exact(patch_id.version());
+                let mut dep = dep;
+                dep.set_version_req(req);
+                return dep;
             }
         }
-    }
+
+        trace!("\tnope, unlocked");
+        dep
+    })
 }
diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs
new file mode 100644
index 00000000000..b88ad6825e7
--- /dev/null
+++ b/src/cargo/core/resolver/conflict_cache.rs
@@ -0,0 +1,225 @@
+use std::collections::{BTreeMap, HashMap, HashSet};
+
+use log::trace;
+
+use super::types::{ConflictMap, ConflictReason};
+use crate::core::resolver::Context;
+use crate::core::{Dependency, PackageId};
+
+/// This is a trie for storing a large number of sets designed to
+/// efficiently see if any of the stored sets are a subset of a search set.
+enum ConflictStoreTrie {
+    /// One of the stored sets.
+    Leaf(ConflictMap),
+    /// A map from an element to a subtrie where
+    /// all the sets in the subtrie contain that element.
+    Node(BTreeMap<PackageId, ConflictStoreTrie>),
+}
+
+impl ConflictStoreTrie {
+    /// Finds any known set of conflicts, if any, where all elements return
+    /// `Some` from `is_active` and contain the specified `PackageId`.
+    /// If more than one is activated, then it will return
+    /// the one that will allow for the most jump-back.
+    fn find(
+        &self,
+        is_active: &impl Fn(PackageId) -> Option<usize>,
+        must_contain: Option<PackageId>,
+        mut max_age: usize,
+    ) -> Option<(&ConflictMap, usize)> {
+        match self {
+            ConflictStoreTrie::Leaf(c) => {
+                if must_contain.is_none() {
+                    Some((c, 0))
+                } else {
+                    // We did not find `must_contain`, so we need to keep looking.
+                    None
+                }
+            }
+            ConflictStoreTrie::Node(m) => {
+                let mut out = None;
+                for (&pid, store) in must_contain
+                    .map(|f| m.range(..=f))
+                    .unwrap_or_else(|| m.range(..))
+                {
+                    // If the key is active, then we need to check all of the corresponding subtrie.
+                    if let Some(age_this) = is_active(pid) {
+                        if age_this >= max_age && must_contain != Some(pid) {
+                            // Not worth looking at; it is too old.
+                            continue;
+                        }
+                        if let Some((o, age_o)) =
+                            store.find(is_active, must_contain.filter(|&f| f != pid), max_age)
+                        {
+                            let age = if must_contain == Some(pid) {
+                                // All the results will include `must_contain`,
+                                // so the age of `must_contain` is not relevant to finding the best result.
+                                age_o
+                            } else {
+                                std::cmp::max(age_this, age_o)
+                            };
+                            if max_age > age {
+                                // we found one that can jump-back further so replace the out.
+                                out = Some((o, age));
+                                // and don't look at anything older.
+                                max_age = age
+                            }
+                        }
+                    }
+                    // Else, if it is not active then there is no way any of the corresponding
+                    // subtrie will be conflicting.
+                }
+                out
+            }
+        }
+    }
+
+    fn insert(&mut self, mut iter: impl Iterator<Item = PackageId>, con: ConflictMap) {
+        if let Some(pid) = iter.next() {
+            if let ConflictStoreTrie::Node(p) = self {
+                p.entry(pid)
+                    .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new()))
+                    .insert(iter, con);
+            }
+            // Else, we already have a subset of this in the `ConflictStore`.
+        } else {
+            // We are at the end of the set we are adding; there are three cases for what to do
+            // next:
+            // 1. `self` is an empty dummy `Node` inserted by `or_insert_with`,
+            //    in which case we should replace it with `Leaf(con)`.
+            // 2. `self` is a `Node` because we previously inserted a superset of
+            //    the thing we are working on (I don't know if this happens in practice),
+            //    but the subset that we are working on will
+            //    always match any time the larger set would have,
+            //    in which case we can replace it with `Leaf(con)`.
+            // 3. `self` is a `Leaf` that is in the same spot in the structure as
+            //    the thing we are working on. So it is equivalent.
+            //    We can replace it with `Leaf(con)`.
+            if cfg!(debug_assertions) {
+                if let ConflictStoreTrie::Leaf(c) = self {
+                    let a: Vec<_> = con.keys().collect();
+                    let b: Vec<_> = c.keys().collect();
+                    assert_eq!(a, b);
+                }
+            }
+            *self = ConflictStoreTrie::Leaf(con)
+        }
+    }
+}
+
+pub(super) struct ConflictCache {
+    // `con_from_dep` is a cache of the reasons for each time we
+    // backtrack. For example after several backtracks we may have:
+    //
+    //     con_from_dep[`foo = "^1.0.2"`] = map!{
+    //         `foo=1.0.1`: map!{`foo=1.0.1`: Semver},
+    //         `foo=1.0.0`: map!{`foo=1.0.0`: Semver},
+    //     };
+    //
+    // This can be read as "we cannot find a candidate for dep `foo = "^1.0.2"`
+    // if either `foo=1.0.1` OR `foo=1.0.0` are activated".
+    //
+    // Another example after several backtracks we may have:
+    //
+    //     con_from_dep[`foo = ">=0.8.2, <=0.9.3"`] = map!{
+    //         `foo=0.8.1`: map!{
+    //             `foo=0.9.4`: map!{`foo=0.8.1`: Semver, `foo=0.9.4`: Semver},
+    //         }
+    //     };
+    //
+    // This can be read as "we cannot find a candidate for dep `foo = ">=0.8.2,
+    // <=0.9.3"` if both `foo=0.8.1` AND `foo=0.9.4` are activated".
+    //
+    // This is used to make sure we don't queue work we know will fail. See the
+    // discussion in https://github.com/rust-lang/cargo/pull/5168 for why this
+    // is so important. The nested HashMaps act as a kind of btree that lets us
+    // look up which entries are still active without
+    // linearly scanning through the full list.
+    //
+    // Also, as a final note, this map is **not** ever removed from. This remains
+    // as a global cache which we never delete from. Any entry in this map is
+    // unconditionally true regardless of our resolution history of how we got
+    // here.
+    con_from_dep: HashMap<Dependency, ConflictStoreTrie>,
+    // `dep_from_pid` is an inverse-index of `con_from_dep`.
+    // For every `PackageId`, this lists the `Dependency`s that mention it.
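// [Editor's note, illustration only] Intended use of this cache from the
// resolver's side, assuming the `ConflictCache` API below:
//
//     if let Some(known) = past_conflicts.find_conflicting(&cx, &dep, None) {
//         // `known` is a set of already-active PackageIds under which `dep`
//         // is unresolvable, so this candidate can be skipped outright.
//     }
//     // ... and after a failed activation, record what went wrong:
//     past_conflicts.insert(&dep, &conflicting_activations);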
+ dep_from_pid: HashMap>, +} + +impl ConflictCache { + pub fn new() -> ConflictCache { + ConflictCache { + con_from_dep: HashMap::new(), + dep_from_pid: HashMap::new(), + } + } + pub fn find( + &self, + dep: &Dependency, + is_active: &impl Fn(PackageId) -> Option, + must_contain: Option, + max_age: usize, + ) -> Option<&ConflictMap> { + self.con_from_dep + .get(dep)? + .find(is_active, must_contain, max_age) + .map(|(c, _)| c) + } + /// Finds any known set of conflicts, if any, + /// which are activated in `cx` and contain `PackageId` specified. + /// If more then one are activated, then it will return + /// one that will allow for the most jump-back. + pub fn find_conflicting( + &self, + cx: &Context, + dep: &Dependency, + must_contain: Option, + ) -> Option<&ConflictMap> { + let out = self.find(dep, &|id| cx.is_active(id), must_contain, std::usize::MAX); + if cfg!(debug_assertions) { + if let Some(c) = &out { + assert!(cx.is_conflicting(None, c).is_some()); + if let Some(f) = must_contain { + assert!(c.contains_key(&f)); + } + } + } + out + } + pub fn conflicting(&self, cx: &Context, dep: &Dependency) -> Option<&ConflictMap> { + self.find_conflicting(cx, dep, None) + } + + /// Adds to the cache a conflict of the form: + /// `dep` is known to be unresolvable if + /// all the `PackageId` entries are activated. + pub fn insert(&mut self, dep: &Dependency, con: &ConflictMap) { + if con.values().any(|c| *c == ConflictReason::PublicDependency) { + // TODO: needs more info for back jumping + // for now refuse to cache it. + return; + } + self.con_from_dep + .entry(dep.clone()) + .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new())) + .insert(con.keys().cloned(), con.clone()); + + trace!( + "{} = \"{}\" adding a skip {:?}", + dep.package_name(), + dep.version_req(), + con + ); + + for c in con.keys() { + self.dep_from_pid + .entry(c.clone()) + .or_insert_with(HashSet::new) + .insert(dep.clone()); + } + } + + pub fn dependencies_conflicting_with(&self, pid: PackageId) -> Option<&HashSet> { + self.dep_from_pid.get(&pid) + } +} diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs new file mode 100644 index 00000000000..27b9a0585eb --- /dev/null +++ b/src/cargo/core/resolver/context.rs @@ -0,0 +1,237 @@ +use std::collections::HashMap; +use std::num::NonZeroU64; +use std::rc::Rc; + +// "ensure" seems to require "bail" be in scope (macro hygiene issue?). +#[allow(unused_imports)] +use failure::{bail, ensure}; +use log::debug; + +use crate::core::interning::InternedString; +use crate::core::{Dependency, PackageId, SourceId, Summary}; +use crate::util::CargoResult; +use crate::util::Graph; + +use super::dep_cache::RegistryQueryer; +use super::types::{ConflictMap, FeaturesSet, ResolveOpts}; + +pub use super::encode::Metadata; +pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; +pub use super::resolve::Resolve; + +// A `Context` is basically a bunch of local resolution information which is +// kept around for all `BacktrackFrame` instances. As a result, this runs the +// risk of being cloned *a lot* so we want to make this as cheap to clone as +// possible. 
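// [Editor's sketch, not part of the patch] Why the persistent `im_rc` maps
// below matter: cloning them shares structure rather than deep-copying, so
// snapshotting a whole `Context` per backtrack frame stays cheap (assumes
// `im-rc` crate semantics).
fn snapshot_for_backtrack(cx: &Context) -> Context {
    cx.clone() // structural sharing, not a deep copy of all activations
}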
+#[derive(Clone)]
+pub struct Context {
+    pub activations: Activations,
+    /// list the features that are activated for each package
+    pub resolve_features: im_rc::HashMap<PackageId, FeaturesSet>,
+    /// get the package that will be linking to a native library by its links attribute
+    pub links: im_rc::HashMap<InternedString, PackageId>,
+    /// for each package the list of names it can see,
+    /// then for each name the exact version that name represents and whether the name is public.
+    pub public_dependency:
+        Option<im_rc::HashMap<PackageId, im_rc::HashMap<InternedString, (PackageId, bool)>>>,
+
+    /// a way to look up, for a package in activations, what packages required it
+    /// and all of the exact deps that it fulfilled.
+    pub parents: Graph<PackageId, Rc<Vec<Dependency>>>,
+}
+
+/// When backtracking it can be useful to know how far back to go.
+/// The `ContextAge` of a `Context` is a monotonically increasing counter of the number
+/// of decisions made to get to this state.
+/// Several structures store the `ContextAge` when it was added,
+/// to be used in `find_candidate` for backtracking.
+pub type ContextAge = usize;
+
+/// Find the activated version of a crate based on the name, source, and semver compatibility.
+/// By storing this in a hash map we ensure that there is only one
+/// semver compatible version of each crate.
+/// This also stores the `ContextAge`.
+pub type Activations =
+    im_rc::HashMap<(InternedString, SourceId, SemverCompatibility), (Summary, ContextAge)>;
+
+/// A type that represents when cargo treats two Versions as compatible.
+/// Versions `a` and `b` are compatible if their left-most nonzero digit is the
+/// same.
+#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]
+pub enum SemverCompatibility {
+    Major(NonZeroU64),
+    Minor(NonZeroU64),
+    Patch(u64),
+}
+
+impl From<&semver::Version> for SemverCompatibility {
+    fn from(ver: &semver::Version) -> Self {
+        if let Some(m) = NonZeroU64::new(ver.major) {
+            return SemverCompatibility::Major(m);
+        }
+        if let Some(m) = NonZeroU64::new(ver.minor) {
+            return SemverCompatibility::Minor(m);
+        }
+        SemverCompatibility::Patch(ver.patch)
+    }
+}
+
+impl PackageId {
+    pub fn as_activations_key(self) -> (InternedString, SourceId, SemverCompatibility) {
+        (self.name(), self.source_id(), self.version().into())
+    }
+}
+
+impl Context {
+    pub fn new(check_public_visible_dependencies: bool) -> Context {
+        Context {
+            resolve_features: im_rc::HashMap::new(),
+            links: im_rc::HashMap::new(),
+            public_dependency: if check_public_visible_dependencies {
+                Some(im_rc::HashMap::new())
+            } else {
+                None
+            },
+            parents: Graph::new(),
+            activations: im_rc::HashMap::new(),
+        }
+    }
+
+    /// Activate this summary by inserting it into our list of known activations.
+    ///
+    /// The `parent` passed in here is the parent summary/dependency edge which
+    /// caused `summary` to get activated. This may not be present for the root
+    /// crate, for example.
+    ///
+    /// Returns `true` if this summary with the given features is already activated.
+    pub fn flag_activated(
+        &mut self,
+        summary: &Summary,
+        opts: &ResolveOpts,
+        parent: Option<(&Summary, &Dependency)>,
+    ) -> CargoResult<bool> {
+        let id = summary.package_id();
+        let age: ContextAge = self.age();
+        match self.activations.entry(id.as_activations_key()) {
+            im_rc::hashmap::Entry::Occupied(o) => {
+                debug_assert_eq!(
+                    &o.get().0,
+                    summary,
+                    "cargo does not allow two semver compatible versions"
+                );
+            }
+            im_rc::hashmap::Entry::Vacant(v) => {
+                if let Some(link) = summary.links() {
+                    ensure!(
+                        self.links.insert(link, id).is_none(),
+                        "Attempting to resolve a dependency with more than one crate with the \
+                         links={}.\nThis will not build as is.
Consider rebuilding the .lock file.", + &*link + ); + } + v.insert((summary.clone(), age)); + + // If we've got a parent dependency which activated us, *and* + // the dependency has a different source id listed than the + // `summary` itself, then things get interesting. This basically + // means that a `[patch]` was used to augment `dep.source_id()` + // with `summary`. + // + // In this scenario we want to consider the activation key, as + // viewed from the perspective of `dep.source_id()`, as being + // fulfilled. This means that we need to add a second entry in + // the activations map for the source that was patched, in + // addition to the source of the actual `summary` itself. + // + // Without this it would be possible to have both 1.0.0 and + // 1.1.0 "from crates.io" in a dependency graph if one of those + // versions came from a `[patch]` source. + if let Some((_, dep)) = parent { + if dep.source_id() != id.source_id() { + let key = (id.name(), dep.source_id(), id.version().into()); + let prev = self.activations.insert(key, (summary.clone(), age)); + assert!(prev.is_none()); + } + } + + return Ok(false); + } + } + debug!("checking if {} is already activated", summary.package_id()); + if opts.all_features { + return Ok(false); + } + + let has_default_feature = summary.features().contains_key("default"); + Ok(match self.resolve_features.get(&id) { + Some(prev) => { + opts.features.is_subset(prev) + && (!opts.uses_default_features + || prev.contains("default") + || !has_default_feature) + } + None => { + opts.features.is_empty() && (!opts.uses_default_features || !has_default_feature) + } + }) + } + + /// Returns the `ContextAge` of this `Context`. + /// For now we use (len of activations) as the age. + /// See the `ContextAge` docs for more details. + pub fn age(&self) -> ContextAge { + self.activations.len() + } + + /// If the package is active returns the `ContextAge` when it was added + pub fn is_active(&self, id: PackageId) -> Option { + self.activations + .get(&id.as_activations_key()) + .and_then(|(s, l)| if s.package_id() == id { Some(*l) } else { None }) + } + + /// Checks whether all of `parent` and the keys of `conflicting activations` + /// are still active. + /// If so returns the `ContextAge` when the newest one was added. + pub fn is_conflicting( + &self, + parent: Option, + conflicting_activations: &ConflictMap, + ) -> Option { + let mut max = 0; + for &id in conflicting_activations.keys().chain(parent.as_ref()) { + if let Some(age) = self.is_active(id) { + max = std::cmp::max(max, age); + } else { + return None; + } + } + Some(max) + } + + pub fn resolve_replacements( + &self, + registry: &RegistryQueryer<'_>, + ) -> HashMap { + self.activations + .values() + .filter_map(|(s, _)| registry.used_replacement_for(s.package_id())) + .collect() + } + + pub fn graph(&self) -> Graph> { + let mut graph: Graph> = Graph::new(); + self.activations + .values() + .for_each(|(r, _)| graph.add(r.package_id())); + for i in self.parents.iter() { + graph.add(*i); + for (o, e) in self.parents.edges(i) { + let old_link = graph.link(*o, *i); + assert!(old_link.is_empty()); + *old_link = e.to_vec(); + } + } + graph + } +} diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs new file mode 100644 index 00000000000..65eca251908 --- /dev/null +++ b/src/cargo/core/resolver/dep_cache.rs @@ -0,0 +1,463 @@ +//! There are 2 sources of facts for the resolver: +//! +//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it. +//! 
- The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated.
+//!
+//! These constitute immutable facts, the solid ground truth that all other inference depends on.
+//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only
+//! look up things we need to. The compromise is to cache the results as they are computed.
+//!
+//! This module implements that cache in all the gory details.
+
+use std::cmp::Ordering;
+use std::collections::{BTreeSet, HashMap, HashSet};
+use std::rc::Rc;
+
+use log::debug;
+
+use crate::core::interning::InternedString;
+use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary};
+use crate::util::errors::CargoResult;
+
+use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet};
+use crate::core::resolver::{ActivateResult, ResolveOpts};
+
+pub struct RegistryQueryer<'a> {
+    pub registry: &'a mut (dyn Registry + 'a),
+    replacements: &'a [(PackageIdSpec, Dependency)],
+    try_to_use: &'a HashSet<PackageId>,
+    /// If set the list of dependency candidates will be sorted by minimal
+    /// versions first. That allows `cargo update -Z minimal-versions` which will
+    /// specify minimum dependency versions to be used.
+    minimal_versions: bool,
+    /// a cache of `Candidate`s that fulfil a `Dependency`
+    registry_cache: HashMap<Dependency, Rc<Vec<Summary>>>,
+    /// a cache of `Dependency`s that are required for a `Summary`
+    summary_cache: HashMap<
+        (Option<PackageId>, Summary, ResolveOpts),
+        Rc<(HashSet<InternedString>, Rc<Vec<DepInfo>>)>,
+    >,
+    /// all the cases we ended up using a supplied replacement
+    used_replacements: HashMap<PackageId, Summary>,
+}
+
+impl<'a> RegistryQueryer<'a> {
+    pub fn new(
+        registry: &'a mut dyn Registry,
+        replacements: &'a [(PackageIdSpec, Dependency)],
+        try_to_use: &'a HashSet<PackageId>,
+        minimal_versions: bool,
+    ) -> Self {
+        RegistryQueryer {
+            registry,
+            replacements,
+            try_to_use,
+            minimal_versions,
+            registry_cache: HashMap::new(),
+            summary_cache: HashMap::new(),
+            used_replacements: HashMap::new(),
+        }
+    }
+
+    pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> {
+        self.used_replacements.get(&p).map(|r| (p, r.package_id()))
+    }
+
+    pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> {
+        self.used_replacements.get(&p)
+    }
+
+    /// Queries the `registry` to return a list of candidates for `dep`.
+    ///
+    /// This method is the location where overrides are taken into account. If
+    /// any candidates are returned which match an override then the override is
+    /// applied by performing a second query for what the override should
+    /// return.
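+    ///
+    /// Results are memoized in `registry_cache`, so asking for candidates for
+    /// the same `Dependency` twice only hits the underlying `Registry` once;
+    /// later calls clone the cached `Rc` of summaries instead.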
+    pub fn query(&mut self, dep: &Dependency) -> CargoResult<Rc<Vec<Summary>>> {
+        if let Some(out) = self.registry_cache.get(dep).cloned() {
+            return Ok(out);
+        }
+
+        let mut ret = Vec::new();
+        self.registry.query(
+            dep,
+            &mut |s| {
+                ret.push(s);
+            },
+            false,
+        )?;
+        for summary in ret.iter_mut() {
+            let mut potential_matches = self
+                .replacements
+                .iter()
+                .filter(|&&(ref spec, _)| spec.matches(summary.package_id()));
+
+            let &(ref spec, ref dep) = match potential_matches.next() {
+                None => continue,
+                Some(replacement) => replacement,
+            };
+            debug!(
+                "found an override for {} {}",
+                dep.package_name(),
+                dep.version_req()
+            );
+
+            let mut summaries = self.registry.query_vec(dep, false)?.into_iter();
+            let s = summaries.next().ok_or_else(|| {
+                failure::format_err!(
+                    "no matching package for override `{}` found\n\
+                     location searched: {}\n\
+                     version required: {}",
+                    spec,
+                    dep.source_id(),
+                    dep.version_req()
+                )
+            })?;
+            let summaries = summaries.collect::<Vec<_>>();
+            if !summaries.is_empty() {
+                let bullets = summaries
+                    .iter()
+                    .map(|s| format!("  * {}", s.package_id()))
+                    .collect::<Vec<_>>();
+                failure::bail!(
+                    "the replacement specification `{}` matched \
+                     multiple packages:\n  * {}\n{}",
+                    spec,
+                    s.package_id(),
+                    bullets.join("\n")
+                );
+            }
+
+            // The dependency should be hard-coded to have the same name and an
+            // exact version requirement, so both of these assertions should
+            // never fail.
+            assert_eq!(s.version(), summary.version());
+            assert_eq!(s.name(), summary.name());
+
+            let replace = if s.source_id() == summary.source_id() {
+                debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s);
+                None
+            } else {
+                Some(s)
+            };
+            let matched_spec = spec.clone();
+
+            // Make sure no duplicates
+            if let Some(&(ref spec, _)) = potential_matches.next() {
+                failure::bail!(
+                    "overlapping replacement specifications found:\n\n  \
+                     * {}\n  * {}\n\nboth specifications match: {}",
+                    matched_spec,
+                    spec,
+                    summary.package_id()
+                );
+            }
+
+            for dep in summary.dependencies() {
+                debug!("\t{} => {}", dep.package_name(), dep.version_req());
+            }
+            if let Some(r) = replace {
+                self.used_replacements.insert(summary.package_id(), r);
+            }
+        }
+
+        // When we attempt versions for a package we'll want to do so in a
+        // sorted fashion to pick the "best candidates" first. Currently we try
+        // prioritized summaries (those in `try_to_use`) and failing that we
+        // list everything from the maximum version to the lowest version.
+        ret.sort_unstable_by(|a, b| {
+            let a_in_previous = self.try_to_use.contains(&a.package_id());
+            let b_in_previous = self.try_to_use.contains(&b.package_id());
+            let previous_cmp = a_in_previous.cmp(&b_in_previous).reverse();
+            match previous_cmp {
+                Ordering::Equal => {
+                    let cmp = a.version().cmp(b.version());
+                    if self.minimal_versions {
+                        // Lower version ordered first.
+                        cmp
+                    } else {
+                        // Higher version ordered first.
+                        cmp.reverse()
+                    }
+                }
+                _ => previous_cmp,
+            }
+        });
+
+        let out = Rc::new(ret);
+
+        self.registry_cache.insert(dep.clone(), out.clone());
+
+        Ok(out)
+    }
+
+    /// Find out what dependencies will be added by activating `candidate`,
+    /// with features described in `opts`. Then look up in the `registry`
+    /// the candidates that will fulfil each of these dependencies, as it is the
+    /// next obvious question.
+    pub fn build_deps(
+        &mut self,
+        parent: Option<PackageId>,
+        candidate: &Summary,
+        opts: &ResolveOpts,
+    ) -> ActivateResult<Rc<(HashSet<InternedString>, Rc<Vec<DepInfo>>)>> {
+        // If we have calculated a result before, then we can just return it,
+        // as it is a "pure" query of its arguments.
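+        // (The cache key is the full `(parent, candidate, opts)` triple:
+        // which dependencies come back, and with which features enabled,
+        // depends on all three, so a narrower key would be incorrect.)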
+        if let Some(out) = self
+            .summary_cache
+            .get(&(parent, candidate.clone(), opts.clone()))
+            .cloned()
+        {
+            return Ok(out);
+        }
+        // First, figure out our set of dependencies based on the requested set
+        // of features. This also calculates what features we're going to enable
+        // for our own dependencies.
+        let (used_features, deps) = resolve_features(parent, candidate, opts)?;
+
+        // Next, transform all dependencies into a list of possible candidates
+        // which can satisfy that dependency.
+        let mut deps = deps
+            .into_iter()
+            .map(|(dep, features)| {
+                let candidates = self.query(&dep)?;
+                Ok((dep, candidates, features))
+            })
+            .collect::<CargoResult<Vec<DepInfo>>>()?;
+
+        // Attempt to resolve dependencies with fewer candidates before trying
+        // dependencies with more candidates. This way if the dependency with
+        // only one candidate can't be resolved we don't have to do a bunch of
+        // work before we figure that out.
+        deps.sort_by_key(|&(_, ref a, _)| a.len());
+
+        let out = Rc::new((used_features, Rc::new(deps)));
+
+        // If we succeed we add the result to the cache so we can use it again next time.
+        // We don't cache the failure cases as they don't implement `Clone`.
+        self.summary_cache
+            .insert((parent, candidate.clone(), opts.clone()), out.clone());
+
+        Ok(out)
+    }
+}
+
+/// Returns the features we ended up using and
+/// all dependencies and the features we want from each of them.
+pub fn resolve_features<'b>(
+    parent: Option<PackageId>,
+    s: &'b Summary,
+    opts: &'b ResolveOpts,
+) -> ActivateResult<(HashSet<InternedString>, Vec<(Dependency, FeaturesSet)>)> {
+    // First, filter by dev-dependencies.
+    let deps = s.dependencies();
+    let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps);
+
+    let reqs = build_requirements(s, opts)?;
+    let mut ret = Vec::new();
+    let mut used_features = HashSet::new();
+    let default_dep = (false, BTreeSet::new());
+
+    // Next, collect all actually enabled dependencies and their features.
+    for dep in deps {
+        // Skip optional dependencies, but not those enabled through a
+        // feature
+        if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) {
+            continue;
+        }
+        // So we want this dependency. Move the features we want from
+        // `feature_deps` to `ret` and register ourselves as using this
+        // name.
+        let base = reqs.deps.get(&dep.name_in_toml()).unwrap_or(&default_dep);
+        used_features.insert(dep.name_in_toml());
+        let always_required = !dep.is_optional()
+            && !s
+                .dependencies()
+                .iter()
+                .any(|d| d.is_optional() && d.name_in_toml() == dep.name_in_toml());
+        if always_required && base.0 {
+            return Err(match parent {
+                None => failure::format_err!(
+                    "Package `{}` does not have feature `{}`. It has a required dependency \
+                     with that name, but only optional dependencies can be used as features.",
+                    s.package_id(),
+                    dep.name_in_toml()
+                )
+                .into(),
+                Some(p) => (
+                    p,
+                    ConflictReason::RequiredDependencyAsFeatures(dep.name_in_toml()),
+                )
+                    .into(),
+            });
+        }
+        let mut base = base.1.clone();
+        base.extend(dep.features().iter());
+        for feature in base.iter() {
+            if feature.contains('/') {
+                return Err(failure::format_err!(
+                    "feature names may not contain slashes: `{}`",
+                    feature
+                )
+                .into());
+            }
+        }
+        ret.push((dep.clone(), Rc::new(base)));
+    }
+
+    // Any entries in `reqs.deps` which weren't used are bugs in that the
+    // package does not actually have those dependencies. We classified
+    // them as dependencies in the first place because there is no such
+    // feature, either.
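+    //
+    // A hypothetical example: `cargo build --features bar` on a package with
+    // no feature or optional dependency named `bar` ends up with `bar` left
+    // over in `reqs.deps`, and we report it via the error constructed below.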
+    let remaining = reqs
+        .deps
+        .keys()
+        .cloned()
+        .filter(|s| !used_features.contains(s))
+        .collect::<Vec<_>>();
+    if !remaining.is_empty() {
+        let features = remaining.join(", ");
+        return Err(match parent {
+            None => failure::format_err!(
+                "Package `{}` does not have these features: `{}`",
+                s.package_id(),
+                features
+            )
+            .into(),
+            Some(p) => (p, ConflictReason::MissingFeatures(features)).into(),
+        });
+    }
+
+    Ok((reqs.into_used(), ret))
+}
+
+/// Takes requested features for a single package from the input `ResolveOpts` and
+/// recurses to find all requested features, dependencies and requested
+/// dependency features in a `Requirements` object, returning it to the resolver.
+fn build_requirements<'a, 'b: 'a>(
+    s: &'a Summary,
+    opts: &'b ResolveOpts,
+) -> CargoResult<Requirements<'a>> {
+    let mut reqs = Requirements::new(s);
+
+    if opts.all_features {
+        for key in s.features().keys() {
+            reqs.require_feature(*key)?;
+        }
+        for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
+            reqs.require_dependency(dep.name_in_toml());
+        }
+    } else {
+        for &f in opts.features.iter() {
+            reqs.require_value(&FeatureValue::new(f, s))?;
+        }
+    }
+
+    if opts.uses_default_features {
+        if s.features().contains_key("default") {
+            reqs.require_feature(InternedString::new("default"))?;
+        }
+    }
+
+    Ok(reqs)
+}
+
+struct Requirements<'a> {
+    summary: &'a Summary,
+    // The deps map is a mapping of package name to list of features enabled.
+    // Each package should be enabled, and each package should have the
+    // specified set of features enabled. The boolean indicates whether this
+    // package was specifically requested (rather than just requesting features
+    // *within* this package).
+    deps: HashMap<InternedString, (bool, BTreeSet<InternedString>)>,
+    // The used features set is the set of features which this local package had
+    // enabled, which is later used when compiling to instruct the code what
+    // features were enabled.
+    used: HashSet<InternedString>,
+    visited: HashSet<InternedString>,
+}
+
+impl Requirements<'_> {
+    fn new(summary: &Summary) -> Requirements<'_> {
+        Requirements {
+            summary,
+            deps: HashMap::new(),
+            used: HashSet::new(),
+            visited: HashSet::new(),
+        }
+    }
+
+    fn into_used(self) -> HashSet<InternedString> {
+        self.used
+    }
+
+    fn require_crate_feature(&mut self, package: InternedString, feat: InternedString) {
+        // If `package` is indeed an optional dependency then we activate the
+        // feature named `package`, but otherwise if `package` is a required
+        // dependency then there's no feature associated with it.
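+        //
+        // E.g. requiring `serde/derive` when `serde` is declared as
+        // `serde = { version = "1.0", optional = true }` marks the implicit
+        // `serde` feature as used and records `derive` as a feature to enable
+        // on that dependency (a hypothetical manifest, for illustration).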
+        if let Some(dep) = self
+            .summary
+            .dependencies()
+            .iter()
+            .find(|p| p.name_in_toml() == package)
+        {
+            if dep.is_optional() {
+                self.used.insert(package);
+            }
+        }
+        self.deps
+            .entry(package)
+            .or_insert((false, BTreeSet::new()))
+            .1
+            .insert(feat);
+    }
+
+    fn seen(&mut self, feat: InternedString) -> bool {
+        if self.visited.insert(feat) {
+            self.used.insert(feat);
+            false
+        } else {
+            true
+        }
+    }
+
+    fn require_dependency(&mut self, pkg: InternedString) {
+        if self.seen(pkg) {
+            return;
+        }
+        self.deps.entry(pkg).or_insert((false, BTreeSet::new())).0 = true;
+    }
+
+    fn require_feature(&mut self, feat: InternedString) -> CargoResult<()> {
+        if feat.is_empty() || self.seen(feat) {
+            return Ok(());
+        }
+        let feature = self
+            .summary
+            .features()
+            .get(feat.as_str())
+            .expect("must be a valid feature");
+        for fv in feature.1.as_slice() {
+            match *fv {
+                FeatureValue::Feature(ref dep_feat) if **dep_feat == *feat => failure::bail!(
+                    "cyclic feature dependency: feature `{}` depends on itself",
+                    feat
+                ),
+                _ => {}
+            }
+            self.require_value(fv)?;
+        }
+        Ok(())
+    }
+
+    fn require_value(&mut self, fv: &FeatureValue) -> CargoResult<()> {
+        match fv {
+            FeatureValue::Feature(feat) => self.require_feature(*feat)?,
+            FeatureValue::Crate(dep) => self.require_dependency(*dep),
+            FeatureValue::CrateFeature(dep, dep_feat) => {
+                self.require_crate_feature(*dep, *dep_feat)
+            }
+        };
+        Ok(())
+    }
+}
diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs
index 9ef4a4ff46f..b60b3a20797 100644
--- a/src/cargo/core/resolver/encode.rs
+++ b/src/cargo/core/resolver/encode.rs
@@ -1,200 +1,639 @@
-use std::collections::{HashMap, BTreeMap};
+//! Definition of how to encode a `Resolve` into a TOML `Cargo.lock` file
+//!
+//! This module contains all machinery necessary to parse a `Resolve` from a
+//! `Cargo.lock` as well as serialize a `Resolve` to a `Cargo.lock`.
+//!
+//! ## Changing `Cargo.lock`
+//!
+//! In general Cargo is quite conservative about changing the format of
+//! `Cargo.lock`. Usage of new features in Cargo can change `Cargo.lock` at any
+//! time, but otherwise changing the serialization of `Cargo.lock` is a
+//! difficult operation that we typically avoid.
+//!
+//! The main problem with changing the format of `Cargo.lock` is that it can
+//! cause quite a bad experience for end users who use different versions of
+//! Cargo. If every PR to a project oscillates between the stable channel's
+//! encoding of Cargo.lock and the nightly channel's encoding then that's a
+//! pretty bad experience.
+//!
+//! We do, however, want to change `Cargo.lock` over time (and we have!). To do
+//! this the rules that we currently have are:
+//!
+//! * Add support for the new format to Cargo
+//! * Continue to, by default, generate the old format
+//! * Preserve the new format if found
+//! * Wait a "long time" (e.g. 6 months or so)
+//! * Change Cargo to by default emit the new format
+//!
+//! This migration scheme in general means that we'll get *support* for a
+//! new format into Cargo ASAP, but it won't really be exercised yet (except in
+//! Cargo's own tests really). Eventually when stable/beta/nightly all have
+//! support for the new format (and maybe a few previous stable versions) we
+//! flip the switch. Projects on nightly will quickly start seeing changes, but
+//! stable/beta/nightly will all understand this new format and will preserve
+//! it.
+//!
+//! While this does mean that projects' `Cargo.lock` changes over time, it's
+//! 
typically a pretty minimal effort change that's just "check in what's
+//! there".
+//!
+//! ## Historical changes to `Cargo.lock`
+//!
+//! Listed from most recent to oldest, these are some of the changes we've made
+//! to `Cargo.lock`'s serialization format:
+//!
+//! * The entries in `dependencies` arrays have been shortened and the
+//!   `checksum` field now shows up directly in `[[package]]` instead of always
+//!   at the end of the file. The goal of this change was to ideally reduce
+//!   merge conflicts being generated on `Cargo.lock`. Updating a version of a
+//!   package now only updates two lines in the file, the checksum and the
+//!   version number, most of the time. Dependency edges are specified in a
+//!   compact form where possible where just the name is listed. The
+//!   version/source on dependency edges are only listed if necessary to
+//!   disambiguate which version or which source is in use.
+//!
+//! * A comment at the top of the file indicates that the file is a generated
+//!   file and contains the special symbol `@generated` to indicate to common
+//!   review tools that it's a generated file.
+//!
+//! * A `[root]` entry for the "root crate" has been removed and instead now
+//!   included in `[[package]]` like everything else.
+//!
+//! * All packages from registries contain a `checksum` which is a sha256
+//!   checksum of the tarball the package is associated with. This is all stored
+//!   in the `[metadata]` table of `Cargo.lock` which all versions of Cargo
+//!   since 1.0 have preserved. The goal of this was to start recording
+//!   checksums so mirror sources can be verified.
+//!
+//! ## Other oddities about `Cargo.lock`
+//!
+//! There are a few other miscellaneous weird things about `Cargo.lock` that you
+//! may want to be aware of when reading this file:
+//!
+//! * All packages have a `source` listed to indicate where they come from. For
+//!   `path` dependencies, however, no `source` is listed. There's no way we
+//!   could emit a filesystem path name and have that be portable across
+//!   systems, so all packages from a `path` are not listed with a `source`.
+//!   Note that this also means that all packages with `path` sources must have
+//!   unique names.
+//!
+//! * The `[metadata]` table in `Cargo.lock` is intended to be a generic mapping
+//!   of strings to strings that's simply preserved by Cargo. This was a very
+//!   early effort to be forward compatible against changes to `Cargo.lock`'s
+//!   format. This is nowadays sort of deemed a bad idea though and we don't
+//!   really use it that much except for `checksum`s historically. It's not
+//!   really recommended to use this.
+//!
+//! * The actual literal on-disk serialization is found in
+//!   `src/cargo/ops/lockfile.rs` which basically renders a `toml::Value` in a
+//!   special fashion to make sure we have strict control over the on-disk
+//!   format.
 
-use regex::Regex;
-use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
+use std::collections::{BTreeMap, HashMap, HashSet};
+use std::fmt;
+use std::str::FromStr;
 
-use core::{PackageId, SourceId};
-use util::{CargoResult, Graph};
+use log::debug;
+use serde::de;
+use serde::ser;
+use serde::{Deserialize, Serialize};
 
-use super::Resolve;
+use crate::core::InternedString;
+use crate::core::{Dependency, Package, PackageId, SourceId, Workspace};
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::{internal, Graph};
 
-#[derive(RustcEncodable, RustcDecodable, Debug)]
+use super::{Resolve, ResolveVersion};
+
+/// The `Cargo.lock` structure.
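+///
+/// A rough sketch of the TOML this struct maps to (shape only, with
+/// hypothetical package names; not a real lock file):
+///
+/// ```toml
+/// [[package]]
+/// name = "foo"
+/// version = "1.0.0"
+/// source = "registry+https://github.com/rust-lang/crates.io-index"
+/// dependencies = [
+///  "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+/// ]
+///
+/// [metadata]
+/// "checksum foo 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<sha256>"
+/// ```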
+#[derive(Serialize, Deserialize, Debug)]
 pub struct EncodableResolve {
     package: Option<Vec<EncodableDependency>>,
-    root: EncodableDependency,
+    /// `root` is optional to allow backward compatibility.
+    root: Option<EncodableDependency>,
     metadata: Option<Metadata>,
+    #[serde(default, skip_serializing_if = "Patch::is_empty")]
+    patch: Patch,
+}
+
+#[derive(Serialize, Deserialize, Debug, Default)]
+struct Patch {
+    unused: Vec<EncodableDependency>,
 }
 
 pub type Metadata = BTreeMap<String, String>;
 
 impl EncodableResolve {
-    pub fn to_resolve(&self, default: &SourceId) -> CargoResult<Resolve> {
-        let mut g = Graph::new();
-        let mut tmp = HashMap::new();
-
-        let packages = Vec::new();
-        let packages = self.package.as_ref().unwrap_or(&packages);
-
-        {
-            let mut register_pkg = |pkg: &EncodableDependency|
-                                    -> CargoResult<()> {
-                let pkgid = try!(pkg.to_package_id(default));
-                let precise = pkgid.source_id().precise()
-                                   .map(|s| s.to_string());
-                assert!(tmp.insert(pkgid.clone(), precise).is_none(),
-                        "a package was referenced twice in the lockfile");
-                g.add(try!(pkg.to_package_id(default)), &[]);
-                Ok(())
-            };
+    /// Convert a `Cargo.lock` to a Resolve.
+    ///
+    /// Note that this `Resolve` is not "complete". For example, the
+    /// dependencies do not know the difference between regular/dev/build
+    /// dependencies, so they are not filled in. It also does not include
+    /// `features`. Care should be taken when using this Resolve. One of the
+    /// primary uses is to be used with `resolve_with_previous` to guide the
+    /// resolver to create a complete Resolve.
+    pub fn into_resolve(self, ws: &Workspace<'_>) -> CargoResult<Resolve> {
+        let path_deps = build_path_deps(ws);
+        let mut checksums = HashMap::new();
+
+        // We assume an older format is being parsed until we see otherwise.
+        let mut version = ResolveVersion::V1;
+
+        let packages = {
+            let mut packages = self.package.unwrap_or_default();
+            if let Some(root) = self.root {
+                packages.insert(0, root);
+            }
+            packages
+        };
 
-            try!(register_pkg(&self.root));
+        // `PackageId`s in the lock file don't include the `source` part
+        // for workspace members, so we reconstruct proper IDs.
+        let live_pkgs = {
+            let mut live_pkgs = HashMap::new();
+            let mut all_pkgs = HashSet::new();
             for pkg in packages.iter() {
-                try!(register_pkg(pkg));
+                let enc_id = EncodablePackageId {
+                    name: pkg.name.clone(),
+                    version: Some(pkg.version.clone()),
+                    source: pkg.source,
+                };
+
+                if !all_pkgs.insert(enc_id.clone()) {
+                    failure::bail!("package `{}` is specified twice in the lockfile", pkg.name);
+                }
+                let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) {
+                    // We failed to find a local package in the workspace.
+                    // It must have been removed and should be ignored.
+                    None => {
+                        debug!("path dependency now missing {} v{}", pkg.name, pkg.version);
+                        continue;
+                    }
+                    Some(&source) => PackageId::new(&pkg.name, &pkg.version, source)?,
+                };
+
+                // If a package has a checksum listed directly on it then record
+                // that here, and we also bump our version up to 2 since V1
+                // didn't ever encode this field.
+                if let Some(cksum) = &pkg.checksum {
+                    version = ResolveVersion::V2;
+                    checksums.insert(id, Some(cksum.clone()));
+                }
+
+                assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none())
             }
+            live_pkgs
+        };
+
+        // When decoding a V2 version the edges in `dependencies` aren't
+        // guaranteed to have either version or source information. This `map`
+        // is used to find package ids even if dependencies have missing
+        // information. This map is from name to version to source to actual
+        // package ID. 
(various levels to drill down step by step) + let mut map = HashMap::new(); + for (id, _) in live_pkgs.values() { + map.entry(id.name().as_str()) + .or_insert(HashMap::new()) + .entry(id.version().to_string()) + .or_insert(HashMap::new()) + .insert(id.source_id(), *id); } - { - let mut add_dependencies = |pkg: &EncodableDependency| - -> CargoResult<()> { - let package_id = try!(pkg.to_package_id(default)); + let mut lookup_id = |enc_id: &EncodablePackageId| -> Option { + // The name of this package should always be in the larger list of + // all packages. + let by_version = map.get(enc_id.name.as_str())?; - let deps = match pkg.dependencies { - Some(ref deps) => deps, - None => return Ok(()), - }; - for edge in deps.iter() { - let to_depend_on = try!(edge.to_package_id(default)); - let precise_pkgid = - tmp.get(&to_depend_on) - .map(|p| to_depend_on.with_precise(p.clone())) - .unwrap_or(to_depend_on.clone()); - g.link(package_id.clone(), precise_pkgid); + // If the version is provided, look that up. Otherwise if the + // version isn't provided this is a V2 manifest and we should only + // have one version for this name. If we have more than one version + // for the name then it's ambiguous which one we'd use. That + // shouldn't ever actually happen but in theory bad git merges could + // produce invalid lock files, so silently ignore these cases. + let by_source = match &enc_id.version { + Some(version) => by_version.get(version)?, + None => { + version = ResolveVersion::V2; + if by_version.len() == 1 { + by_version.values().next().unwrap() + } else { + return None; + } } - Ok(()) }; - try!(add_dependencies(&self.root)); - for pkg in packages.iter() { - try!(add_dependencies(pkg)); + // This is basically the same as above. Note though that `source` is + // always missing for path dependencies regardless of serialization + // format. That means we have to handle the `None` case a bit more + // carefully. + match &enc_id.source { + Some(source) => by_source.get(source).cloned(), + None => { + // Look through all possible packages ids for this + // name/version. If there's only one `path` dependency then + // we are hardcoded to use that since `path` dependencies + // can't have a source listed. + let mut path_packages = by_source.values().filter(|p| p.source_id().is_path()); + if let Some(path) = path_packages.next() { + if path_packages.next().is_some() { + return None; + } + Some(*path) + + // ... otherwise if there's only one then we must be + // implicitly using that one due to a V2 serialization of + // the lock file + } else if by_source.len() == 1 { + let id = by_source.values().next().unwrap(); + version = ResolveVersion::V2; + Some(*id) + + // ... and failing that we probably had a bad git merge of + // `Cargo.lock` or something like that, so just ignore this. 
+ } else { + None + } + } } + }; + + let mut g = Graph::new(); + + for &(ref id, _) in live_pkgs.values() { + g.add(id.clone()); } - Ok(Resolve { - graph: g, - root: try!(self.root.to_package_id(default)), - features: HashMap::new(), - metadata: self.metadata.clone(), - }) + for &(ref id, pkg) in live_pkgs.values() { + let deps = match pkg.dependencies { + Some(ref deps) => deps, + None => continue, + }; + + for edge in deps.iter() { + if let Some(to_depend_on) = lookup_id(edge) { + g.link(id.clone(), to_depend_on); + } + } + } + + let replacements = { + let mut replacements = HashMap::new(); + for &(ref id, pkg) in live_pkgs.values() { + if let Some(ref replace) = pkg.replace { + assert!(pkg.dependencies.is_none()); + if let Some(replace_id) = lookup_id(replace) { + replacements.insert(id.clone(), replace_id); + } + } + } + replacements + }; + + let mut metadata = self.metadata.unwrap_or_default(); + + // In the V1 serialization formats all checksums were listed in the lock + // file in the `[metadata]` section, so if we're still V1 then look for + // that here. + let prefix = "checksum "; + let mut to_remove = Vec::new(); + for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { + to_remove.push(k.to_string()); + let k = &k[prefix.len()..]; + let enc_id: EncodablePackageId = k + .parse() + .chain_err(|| internal("invalid encoding of checksum in lockfile"))?; + let id = match lookup_id(&enc_id) { + Some(id) => id, + _ => continue, + }; + + let v = if v == "" { + None + } else { + Some(v.to_string()) + }; + checksums.insert(id, v); + } + // If `checksum` was listed in `[metadata]` but we were previously + // listed as `V2` then assume some sort of bad git merge happened, so + // discard all checksums and let's regenerate them later. + if to_remove.len() > 0 && version == ResolveVersion::V2 { + checksums.drain(); + } + for k in to_remove { + metadata.remove(&k); + } + + let mut unused_patches = Vec::new(); + for pkg in self.patch.unused { + let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { + Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?, + None => continue, + }; + unused_patches.push(id); + } + + Ok(Resolve::new( + g, + replacements, + HashMap::new(), + checksums, + metadata, + unused_patches, + version, + )) } } -#[derive(RustcEncodable, RustcDecodable, Debug, PartialOrd, Ord, PartialEq, Eq)] +fn build_path_deps(ws: &Workspace<'_>) -> HashMap { + // If a crate is **not** a path source, then we're probably in a situation + // such as `cargo install` with a lock file from a remote dependency. In + // that case we don't need to fixup any path dependencies (as they're not + // actually path dependencies any more), so we ignore them. 
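+    //
+    // The returned map is `package name -> SourceId` for every path package
+    // reachable from the workspace, so lock-file entries with no `source` can
+    // be matched back up with the on-disk packages they refer to.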
+ let members = ws + .members() + .filter(|p| p.package_id().source_id().is_path()) + .collect::>(); + + let mut ret = HashMap::new(); + let mut visited = HashSet::new(); + for member in members.iter() { + ret.insert( + member.package_id().name().to_string(), + member.package_id().source_id(), + ); + visited.insert(member.package_id().source_id()); + } + for member in members.iter() { + build_pkg(member, ws, &mut ret, &mut visited); + } + for deps in ws.root_patch().values() { + for dep in deps { + build_dep(dep, ws, &mut ret, &mut visited); + } + } + for &(_, ref dep) in ws.root_replace() { + build_dep(dep, ws, &mut ret, &mut visited); + } + + return ret; + + fn build_pkg( + pkg: &Package, + ws: &Workspace<'_>, + ret: &mut HashMap, + visited: &mut HashSet, + ) { + for dep in pkg.dependencies() { + build_dep(dep, ws, ret, visited); + } + } + + fn build_dep( + dep: &Dependency, + ws: &Workspace<'_>, + ret: &mut HashMap, + visited: &mut HashSet, + ) { + let id = dep.source_id(); + if visited.contains(&id) || !id.is_path() { + return; + } + let path = match id.url().to_file_path() { + Ok(p) => p.join("Cargo.toml"), + Err(_) => return, + }; + let pkg = match ws.load(&path) { + Ok(p) => p, + Err(_) => return, + }; + ret.insert(pkg.name().to_string(), pkg.package_id().source_id()); + visited.insert(pkg.package_id().source_id()); + build_pkg(&pkg, ws, ret, visited); + } +} + +impl Patch { + fn is_empty(&self) -> bool { + self.unused.is_empty() + } +} + +#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)] pub struct EncodableDependency { name: String, version: String, source: Option, - dependencies: Option> -} - -impl EncodableDependency { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - &self.name, - &self.version, - self.source.as_ref().unwrap_or(default_source)) - } + checksum: Option, + dependencies: Option>, + replace: Option, } -#[derive(Debug, PartialOrd, Ord, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)] pub struct EncodablePackageId { name: String, - version: String, - source: Option + version: Option, + source: Option, } -impl Encodable for EncodablePackageId { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let mut out = format!("{} {}", self.name, self.version); - if let Some(ref s) = self.source { - out.push_str(&format!(" ({})", s.to_url())); +impl fmt::Display for EncodablePackageId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name)?; + if let Some(s) = &self.version { + write!(f, " {}", s)?; } - out.encode(s) + if let Some(s) = &self.source { + write!(f, " ({})", s.into_url())?; + } + Ok(()) } } -impl Decodable for EncodablePackageId { - fn decode(d: &mut D) -> Result { - let string: String = try!(Decodable::decode(d)); - let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); - let captures = regex.captures(&string) - .expect("invalid serialized PackageId"); - - let name = captures.at(1).unwrap(); - let version = captures.at(2).unwrap(); +impl FromStr for EncodablePackageId { + type Err = failure::Error; - let source = captures.at(3); - - let source_id = source.map(|s| SourceId::from_url(s.to_string())); + fn from_str(s: &str) -> CargoResult { + let mut s = s.splitn(3, ' '); + let name = s.next().unwrap(); + let version = s.next(); + let source_id = match s.next() { + Some(s) => { + if s.starts_with('(') && s.ends_with(')') { + Some(SourceId::from_url(&s[1..s.len() - 1])?) 
+ } else { + failure::bail!("invalid serialized PackageId") + } + } + None => None, + }; Ok(EncodablePackageId { name: name.to_string(), - version: version.to_string(), - source: source_id + version: version.map(|v| v.to_string()), + source: source_id, }) } } -impl EncodablePackageId { - fn to_package_id(&self, default_source: &SourceId) -> CargoResult { - PackageId::new( - &self.name, - &self.version, - self.source.as_ref().unwrap_or(default_source)) +impl ser::Serialize for EncodablePackageId { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + s.collect_str(self) } } -impl Encodable for Resolve { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let mut ids: Vec<&PackageId> = self.graph.iter().collect(); +impl<'de> de::Deserialize<'de> for EncodablePackageId { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + String::deserialize(d).and_then(|string| { + string + .parse::() + .map_err(de::Error::custom) + }) + } +} + +impl<'a> ser::Serialize for Resolve { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + let mut ids: Vec<_> = self.iter().collect(); ids.sort(); - let encodable = ids.iter().filter_map(|&id| { - if self.root == *id { return None; } + let state = EncodeState::new(self); + + let encodable = ids + .iter() + .map(|&id| encodable_resolve_node(id, self, &state)) + .collect::>(); + + let mut metadata = self.metadata().clone(); - Some(encodable_resolve_node(id, &self.root, &self.graph)) - }).collect::>(); + if *self.version() == ResolveVersion::V1 { + for &id in ids.iter().filter(|id| !id.source_id().is_path()) { + let checksum = match self.checksums()[&id] { + Some(ref s) => &s[..], + None => "", + }; + let id = encodable_package_id(id, &state); + metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string()); + } + } + + let metadata = if metadata.is_empty() { + None + } else { + Some(metadata) + }; + let patch = Patch { + unused: self + .unused_patches() + .iter() + .map(|id| EncodableDependency { + name: id.name().to_string(), + version: id.version().to_string(), + source: encode_source(id.source_id()), + dependencies: None, + replace: None, + checksum: match self.version() { + ResolveVersion::V2 => self.checksums().get(&id).and_then(|x| x.clone()), + ResolveVersion::V1 => None, + }, + }) + .collect(), + }; EncodableResolve { package: Some(encodable), - root: encodable_resolve_node(&self.root, &self.root, &self.graph), - metadata: self.metadata.clone(), - }.encode(s) + root: None, + metadata, + patch, + } + .serialize(s) } } -fn encodable_resolve_node(id: &PackageId, root: &PackageId, - graph: &Graph) -> EncodableDependency { - let deps = graph.edges(id).map(|edge| { - let mut deps = edge.map(|e| { - encodable_package_id(e, root) - }).collect::>(); - deps.sort(); - deps - }); +pub struct EncodeState<'a> { + counts: Option>>, +} - let source = if id.source_id() == root.source_id() { - None - } else { - Some(id.source_id().clone()) +impl<'a> EncodeState<'a> { + pub fn new(resolve: &'a Resolve) -> EncodeState<'a> { + let mut counts = None; + if *resolve.version() == ResolveVersion::V2 { + let mut map = HashMap::new(); + for id in resolve.iter() { + let slot = map + .entry(id.name()) + .or_insert(HashMap::new()) + .entry(id.version()) + .or_insert(0); + *slot += 1; + } + counts = Some(map); + } + EncodeState { counts } + } +} + +fn encodable_resolve_node( + id: PackageId, + resolve: &Resolve, + state: &EncodeState<'_>, +) -> EncodableDependency { + let (replace, deps) = match 
resolve.replacement(id) { + Some(id) => (Some(encodable_package_id(id, state)), None), + None => { + let mut deps = resolve + .deps_not_replaced(id) + .map(|(id, _)| encodable_package_id(id, state)) + .collect::>(); + deps.sort(); + (None, Some(deps)) + } }; EncodableDependency { name: id.name().to_string(), version: id.version().to_string(), - source: source, + source: encode_source(id.source_id()), dependencies: deps, + replace, + checksum: match resolve.version() { + ResolveVersion::V2 => resolve.checksums().get(&id).and_then(|s| s.clone()), + ResolveVersion::V1 => None, + }, } } -fn encodable_package_id(id: &PackageId, root: &PackageId) -> EncodablePackageId { - let source = if id.source_id() == root.source_id() { - None - } else { - Some(id.source_id().with_precise(None)) - }; +pub fn encodable_package_id(id: PackageId, state: &EncodeState<'_>) -> EncodablePackageId { + let mut version = Some(id.version().to_string()); + let mut source = encode_source(id.source_id()).map(|s| s.with_precise(None)); + if let Some(counts) = &state.counts { + let version_counts = &counts[&id.name()]; + if version_counts[&id.version()] == 1 { + source = None; + if version_counts.len() == 1 { + version = None; + } + } + } EncodablePackageId { name: id.name().to_string(), - version: id.version().to_string(), - source: source, + version, + source, + } +} + +fn encode_source(id: SourceId) -> Option { + if id.is_path() { + None + } else { + Some(id) } } diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs new file mode 100644 index 00000000000..e67ec9ede01 --- /dev/null +++ b/src/cargo/core/resolver/errors.rs @@ -0,0 +1,323 @@ +use std::fmt; + +use crate::core::{Dependency, PackageId, Registry, Summary}; +use crate::util::lev_distance::lev_distance; +use crate::util::Config; +use failure::{Error, Fail}; +use semver; + +use super::context::Context; +use super::types::{ConflictMap, ConflictReason}; + +/// Error during resolution providing a path of `PackageId`s. +pub struct ResolveError { + cause: Error, + package_path: Vec, +} + +impl ResolveError { + pub fn new>(cause: E, package_path: Vec) -> Self { + Self { + cause: cause.into(), + package_path, + } + } + + /// Returns a path of packages from the package whose requirements could not be resolved up to + /// the root. 
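+    ///
+    /// For example, if `leaf`'s requirements were unsatisfiable and it was
+    /// pulled in via `middle`, the path is `[leaf, middle, root]`: most
+    /// specific package first, workspace root last.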
+ pub fn package_path(&self) -> &[PackageId] { + &self.package_path + } +} + +impl Fail for ResolveError { + fn cause(&self) -> Option<&dyn Fail> { + self.cause.as_fail().cause() + } +} + +impl fmt::Debug for ResolveError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.cause.fmt(f) + } +} + +impl fmt::Display for ResolveError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.cause.fmt(f) + } +} + +pub type ActivateResult = Result; + +#[derive(Debug)] +pub enum ActivateError { + Fatal(failure::Error), + Conflict(PackageId, ConflictReason), +} + +impl From<::failure::Error> for ActivateError { + fn from(t: ::failure::Error) -> Self { + ActivateError::Fatal(t) + } +} + +impl From<(PackageId, ConflictReason)> for ActivateError { + fn from(t: (PackageId, ConflictReason)) -> Self { + ActivateError::Conflict(t.0, t.1) + } +} + +pub(super) fn activation_error( + cx: &Context, + registry: &mut dyn Registry, + parent: &Summary, + dep: &Dependency, + conflicting_activations: &ConflictMap, + candidates: &[Summary], + config: Option<&Config>, +) -> ResolveError { + let to_resolve_err = |err| { + ResolveError::new( + err, + cx.parents + .path_to_bottom(&parent.package_id()) + .into_iter() + .cloned() + .collect(), + ) + }; + + if !candidates.is_empty() { + let mut msg = format!("failed to select a version for `{}`.", dep.package_name()); + msg.push_str("\n ... required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), + )); + + msg.push_str("\nversions that meet the requirements `"); + msg.push_str(&dep.version_req().to_string()); + msg.push_str("` are: "); + msg.push_str( + &candidates + .iter() + .map(|v| v.version()) + .map(|v| v.to_string()) + .collect::>() + .join(", "), + ); + + let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect(); + conflicting_activations.sort_unstable(); + let (links_errors, mut other_errors): (Vec<_>, Vec<_>) = conflicting_activations + .drain(..) + .rev() + .partition(|&(_, r)| r.is_links()); + + for &(p, r) in links_errors.iter() { + if let ConflictReason::Links(ref link) = *r { + msg.push_str("\n\nthe package `"); + msg.push_str(&*dep.package_name()); + msg.push_str("` links to the native library `"); + msg.push_str(link); + msg.push_str("`, but it conflicts with a previous package which links to `"); + msg.push_str(link); + msg.push_str("` as well:\n"); + } + msg.push_str(&describe_path(&cx.parents.path_to_bottom(p))); + } + + let (features_errors, mut other_errors): (Vec<_>, Vec<_>) = other_errors + .drain(..) + .partition(|&(_, r)| r.is_missing_features()); + + for &(p, r) in features_errors.iter() { + if let ConflictReason::MissingFeatures(ref features) = *r { + msg.push_str("\n\nthe package `"); + msg.push_str(&*p.name()); + msg.push_str("` depends on `"); + msg.push_str(&*dep.package_name()); + msg.push_str("`, with features: `"); + msg.push_str(features); + msg.push_str("` but `"); + msg.push_str(&*dep.package_name()); + msg.push_str("` does not have these features.\n"); + } + // p == parent so the full path is redundant. + } + + let (required_dependency_as_features_errors, other_errors): (Vec<_>, Vec<_>) = other_errors + .drain(..) 
+            .partition(|&(_, r)| r.is_required_dependency_as_features());
+
+        for &(p, r) in required_dependency_as_features_errors.iter() {
+            if let ConflictReason::RequiredDependencyAsFeatures(ref features) = *r {
+                msg.push_str("\n\nthe package `");
+                msg.push_str(&*p.name());
+                msg.push_str("` depends on `");
+                msg.push_str(&*dep.package_name());
+                msg.push_str("`, with features: `");
+                msg.push_str(features);
+                msg.push_str("` but `");
+                msg.push_str(&*dep.package_name());
+                msg.push_str("` does not have these features.\n");
+                msg.push_str(
+                    " It has a required dependency with that name, \
+                     but only optional dependencies can be used as features.\n",
+                );
+            }
+            // p == parent so the full path is redundant.
+        }
+
+        if !other_errors.is_empty() {
+            msg.push_str(
+                "\n\nall possible versions conflict with \
+                 previously selected packages.",
+            );
+        }
+
+        for &(p, _) in other_errors.iter() {
+            msg.push_str("\n\n  previously selected ");
+            msg.push_str(&describe_path(&cx.parents.path_to_bottom(p)));
+        }
+
+        msg.push_str("\n\nfailed to select a version for `");
+        msg.push_str(&*dep.package_name());
+        msg.push_str("` which could resolve this conflict");
+
+        return to_resolve_err(failure::format_err!("{}", msg));
+    }
+
+    // We didn't actually find any candidates, so we need to
+    // give an error message that nothing was found.
+    //
+    // Maybe the user mistyped the ver_req? Like `dep="2"` when `dep="0.2"`
+    // was meant. So we re-query the registry with `dep="*"` so we can
+    // list a few versions that were actually found.
+    let all_req = semver::VersionReq::parse("*").unwrap();
+    let mut new_dep = dep.clone();
+    new_dep.set_version_req(all_req);
+    let mut candidates = match registry.query_vec(&new_dep, false) {
+        Ok(candidates) => candidates,
+        Err(e) => return to_resolve_err(e),
+    };
+    candidates.sort_unstable_by(|a, b| b.version().cmp(a.version()));
+
+    let mut msg = if !candidates.is_empty() {
+        let versions = {
+            let mut versions = candidates
+                .iter()
+                .take(3)
+                .map(|cand| cand.version().to_string())
+                .collect::<Vec<_>>();
+
+            if candidates.len() > 3 {
+                versions.push("...".into());
+            }
+
+            versions.join(", ")
+        };
+
+        let mut msg = format!(
+            "failed to select a version for the requirement `{} = \"{}\"`\n  \
+             candidate versions found which didn't match: {}\n  \
+             location searched: {}\n",
+            dep.package_name(),
+            dep.version_req(),
+            versions,
+            registry.describe_source(dep.source_id()),
+        );
+        msg.push_str("required by ");
+        msg.push_str(&describe_path(
+            &cx.parents.path_to_bottom(&parent.package_id()),
+        ));
+
+        // If we have a path dependency with a locked version, then this may
+        // indicate that we updated a sub-package and forgot to run `cargo
+        // update`. In this case try to print a helpful error!
+        if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') {
+            msg.push_str(
+                "\nconsider running `cargo update` to update \
+                 a path dependency's locked version",
+            );
+        }
+
+        if registry.is_replaced(dep.source_id()) {
+            msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?");
+        }
+
+        msg
+    } else {
+        // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing`
+        // was meant. So we try asking the registry for a `fuzzy` search for suggestions.
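+        // (A fuzzy query returns names close to the one requested; below we
+        // rank them by Levenshtein distance and only suggest names with a
+        // distance less than 4.)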
+ let mut candidates = Vec::new(); + if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s.name()), true) { + return to_resolve_err(e); + }; + candidates.sort_unstable(); + candidates.dedup(); + let mut candidates: Vec<_> = candidates + .iter() + .map(|n| (lev_distance(&*new_dep.package_name(), &*n), n)) + .filter(|&(d, _)| d < 4) + .collect(); + candidates.sort_by_key(|o| o.0); + let mut msg = format!( + "no matching package named `{}` found\n\ + location searched: {}\n", + dep.package_name(), + dep.source_id() + ); + if !candidates.is_empty() { + let mut names = candidates + .iter() + .take(3) + .map(|c| c.1.as_str()) + .collect::>(); + + if candidates.len() > 3 { + names.push("..."); + } + + msg.push_str("perhaps you meant: "); + msg.push_str(&names.iter().enumerate().fold( + String::default(), + |acc, (i, el)| match i { + 0 => acc + el, + i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, + _ => acc + ", " + el, + }, + )); + msg.push_str("\n"); + } + msg.push_str("required by "); + msg.push_str(&describe_path( + &cx.parents.path_to_bottom(&parent.package_id()), + )); + + msg + }; + + if let Some(config) = config { + if config.offline() { + msg.push_str( + "\nAs a reminder, you're using offline mode (--offline) \ + which can sometimes cause surprising resolution failures, \ + if this error is too confusing you may wish to retry \ + without the offline flag.", + ); + } + } + + to_resolve_err(failure::format_err!("{}", msg)) +} + +/// Returns String representation of dependency chain for a particular `pkgid`. +pub(super) fn describe_path(path: &[&PackageId]) -> String { + use std::fmt::Write; + let mut dep_path_desc = format!("package `{}`", path[0]); + for dep in path[1..].iter() { + write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap(); + } + dep_path_desc +} diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 1ac67168d5d..166fdac90a7 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -1,4 +1,4 @@ -//! Resolution of the entire dependency graph for a crate +//! Resolution of the entire dependency graph for a crate. //! //! This module implements the core logic in taking the world of crates and //! constraints and creating a resolved graph with locked versions for all @@ -6,14 +6,13 @@ //! which is more worried about discovering crates from various sources, this //! module just uses the Registry trait as a source to learn about crates from. //! -//! Actually solving a constraint graph is an NP-hard (or NP-complete, I forget -//! which) problem, this the algorithm is basically a nice heuristic to make -//! sure we get roughly the best answer most of the time. The constraints that -//! we're working with are: +//! Actually solving a constraint graph is an NP-hard problem. This algorithm +//! is basically a nice heuristic to make sure we get roughly the best answer +//! most of the time. The constraints that we're working with are: //! //! 1. Each crate can have any number of dependencies. Each dependency can //! declare a version range that it is compatible with. -//! 2. Crates can be activated with multiple version (e.g. show up in the +//! 2. Crates can be activated with multiple version (e.g., show up in the //! dependency graph twice) so long as each pairwise instance have //! semver-incompatible versions. //! @@ -24,9 +23,11 @@ //! * Never try to activate a crate version which is incompatible. This means we //! 
only try crates which will actually satisfy a dependency and we won't ever //! try to activate a crate that's semver compatible with something else -//! activatd (as we're only allowed to have one). +//! activated (as we're only allowed to have one) nor try to activate a crate +//! that has the same links attribute as something else +//! activated. //! * Always try to activate the highest version crate first. The default -//! dependency in Cargo (e.g. when you write `foo = "0.1.2"`) is +//! dependency in Cargo (e.g., when you write `foo = "0.1.2"`) is //! semver-compatible, so selecting the highest version possible will allow us //! to hopefully satisfy as many dependencies at once. //! @@ -40,704 +41,1106 @@ //! //! Note that this is a relatively performance-critical portion of Cargo. The //! data that we're processing is proportional to the size of the dependency -//! graph, which can often be quite large (e.g. take a look at Servo). To make +//! graph, which can often be quite large (e.g., take a look at Servo). To make //! matters worse the DFS algorithm we're implemented is inherently quite //! inefficient. When we add the requirement of backtracking on top it means //! that we're implementing something that probably shouldn't be allocating all //! over the place. -use std::collections::HashSet; -use std::collections::hash_map::HashMap; -use std::fmt; -use std::ops::Range; +use std::collections::{BTreeMap, HashMap, HashSet}; +use std::mem; use std::rc::Rc; -use semver; +use std::time::{Duration, Instant}; -use core::{PackageId, Registry, SourceId, Summary, Dependency}; -use core::PackageIdSpec; -use util::{CargoResult, Graph, human, ChainError, CargoError}; -use util::profile; -use util::graph::{Nodes, Edges}; +use log::{debug, trace}; + +use crate::core::PackageIdSpec; +use crate::core::{Dependency, PackageId, Registry, Summary}; +use crate::util::config::Config; +use crate::util::errors::CargoResult; +use crate::util::profile; + +use self::context::Context; +use self::dep_cache::RegistryQueryer; +use self::types::{ConflictMap, ConflictReason, DepsFrame}; +use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress}; -pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId}; pub use self::encode::Metadata; +pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; +pub use self::errors::{ActivateError, ActivateResult, ResolveError}; +pub use self::resolve::{Resolve, ResolveVersion}; +pub use self::types::ResolveOpts; +mod conflict_cache; +mod context; +mod dep_cache; mod encode; +mod errors; +mod resolve; +mod types; -/// Represents a fully resolved package dependency graph. Each node in the graph -/// is a package and edges represent dependencies between packages. +/// Builds the list of all packages required to build the first argument. /// -/// Each instance of `Resolve` also understands the full set of features used -/// for each package as well as what the root package is. -#[derive(PartialEq, Eq, Clone)] -pub struct Resolve { - graph: Graph, - features: HashMap>, - root: PackageId, - metadata: Option, -} +/// * `summaries` - the list of package summaries along with how to resolve +/// their features. This is a list of all top-level packages that are intended +/// to be part of the lock file (resolve output). These typically are a list +/// of all workspace members. +/// +/// * `replacements` - this is a list of `[replace]` directives found in the +/// root of the workspace. 
The list here is a `PackageIdSpec` of what to
+///   replace and a `Dependency` to replace that with. In general it's not
+///   recommended to use `[replace]` any more; use `[patch]` instead, which
+///   is supported elsewhere.
+///
+/// * `registry` - this is the source from which all package summaries are
+///   loaded. It's expected that this is extensively configured ahead of time
+///   and is idempotent with our requests to it (aka returns the same results
+///   for the same query every time). Typically this is an instance of a
+///   `PackageRegistry`.
+///
+/// * `try_to_use` - this is a list of package IDs which were previously found
+///   in the lock file. We heuristically prefer the ids listed in `try_to_use`
+///   when sorting candidates to activate, but otherwise this isn't used
+///   anywhere else.
+///
+/// * `config` - a location to print warnings and such, or `None` if no warnings
+///   should be printed.
+///
+/// * `print_warnings` - whether or not to print backwards-compatibility
+///   warnings and such
+///
+/// * `check_public_visible_dependencies` - a flag for whether to enforce the restrictions
+///   introduced in the "public & private dependencies" RFC (1977). The current implementation
+///   makes sure that there is only one version of each name visible to each package.
+///
+///   But there are 2 stable ways to directly depend on different versions of the same name.
+///   1. Use the renamed dependencies functionality
+///   2. Use 'cfg({})' dependencies functionality
+///
+///   When we have decided how to implement this without breaking existing functionality,
+///   this flag can be removed.
+pub fn resolve(
+    summaries: &[(Summary, ResolveOpts)],
+    replacements: &[(PackageIdSpec, Dependency)],
+    registry: &mut dyn Registry,
+    try_to_use: &HashSet<PackageId>,
+    config: Option<&Config>,
+    check_public_visible_dependencies: bool,
+) -> CargoResult<Resolve> {
+    let cx = Context::new(check_public_visible_dependencies);
+    let _p = profile::start("resolving");
+    let minimal_versions = match config {
+        Some(config) => config.cli_unstable().minimal_versions,
+        None => false,
+    };
+    let mut registry = RegistryQueryer::new(registry, replacements, try_to_use, minimal_versions);
+    let cx = activate_deps_loop(cx, &mut registry, summaries, config)?;
+
+    let mut cksums = HashMap::new();
+    for (summary, _) in cx.activations.values() {
+        let cksum = summary.checksum().map(|s| s.to_string());
+        cksums.insert(summary.package_id(), cksum);
+    }
+    let resolve = Resolve::new(
+        cx.graph(),
+        cx.resolve_replacements(&registry),
+        cx.resolve_features
+            .iter()
+            .map(|(k, v)| {
+                (
+                    *k,
+                    v.iter()
+                        .map(|x| {
+                            let platform = summaries
+                                .iter()
+                                .find(|(summary, _)| summary.features().get(x).is_some())
+                                .map(|(summary, _)| summary.features().get(x).unwrap().0.clone());
+                            let platform = if let Some(platform) = platform {
+                                platform
+                            } else {
+                                None
+                            };
+                            (x.to_string(), platform)
+                        })
+                        .collect(),
+                )
+            })
+            .collect(),
+        cksums,
+        BTreeMap::new(),
+        Vec::new(),
+        ResolveVersion::default(),
+    );
 
-#[derive(Clone, Copy)]
-pub enum Method<'a> {
-    Everything,
-    Required {
-        dev_deps: bool,
-        features: &'a [String],
-        uses_default_features: bool,
-    },
+    check_cycles(&resolve)?;
+    check_duplicate_pkgs_in_lockfile(&resolve)?;
+    trace!("resolved: {:?}", resolve);
+
+    Ok(resolve)
 }
 
-// Err(..) == standard transient error (e.g. I/O error)
-// Ok(Err(..)) == resolve error, but is human readable
-// Ok(Ok(..)) == success in resolving
-type ResolveResult = CargoResult>>;
-
-// Information about the dependencies for a crate, a tuple of:
-//
-// (dependency info, candidates, features activated)
-type DepInfo = (Dependency, Vec<Rc<Summary>>, Vec<String>);
-
-impl Resolve {
-    fn new(root: PackageId) -> Resolve {
-        let mut g = Graph::new();
-        g.add(root.clone(), &[]);
-        Resolve { graph: g, root: root, features: HashMap::new(), metadata: None }
-    }
+/// Recursively activates the dependencies for `top`, in depth-first order,
+/// backtracking across possible candidates for each dependency as necessary.
+///
+/// If all dependencies can be activated and resolved to a version in the
+/// dependency graph, cx.resolve is returned.
+fn activate_deps_loop(
+    mut cx: Context,
+    registry: &mut RegistryQueryer<'_>,
+    summaries: &[(Summary, ResolveOpts)],
+    config: Option<&Config>,
+) -> CargoResult<Context> {
+    let mut backtrack_stack = Vec::new();
+    let mut remaining_deps = RemainingDeps::new();
 
-    pub fn copy_metadata(&mut self, other: &Resolve) {
-        self.metadata = other.metadata.clone();
-    }
+    // `past_conflicting_activations` is a cache of the reasons for each time we
+    // backtrack.
+    let mut past_conflicting_activations = conflict_cache::ConflictCache::new();
 
-    pub fn iter(&self) -> Nodes<PackageId> {
-        self.graph.iter()
+    // Activate all the initial summaries to kick off some work.
+    for &(ref summary, ref opts) in summaries {
+        debug!("initial activation: {}", summary.package_id());
+        let res = activate(&mut cx, registry, None, summary.clone(), opts.clone());
+        match res {
+            Ok(Some((frame, _))) => remaining_deps.push(frame),
+            Ok(None) => (),
+            Err(ActivateError::Fatal(e)) => return Err(e),
+            Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate"),
+        }
     }
 
-    pub fn root(&self) -> &PackageId { &self.root }
+    let mut printed = ResolverProgress::new();
 
-    pub fn deps(&self, pkg: &PackageId) -> Option<Edges<PackageId>> {
-        self.graph.edges(pkg)
-    }
+    // Main resolution loop, this is the workhorse of the resolution algorithm.
+    //
+    // You'll note that a few stacks are maintained on the side, which might
+    // seem odd when this algorithm looks like it could be implemented
+    // recursively. While correct, this is implemented iteratively to avoid
+    // blowing the stack (the recursion depth is proportional to the size of the
+    // input).
+    //
+    // The general sketch of this loop is to run until there are no dependencies
+    // left to activate, and for each dependency to attempt to activate all of
+    // its own dependencies in turn. The `backtrack_stack` is a side table of
+    // backtracking states where if we hit an error we can return to in order to
+    // attempt to continue resolving.
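The comment above is the crux of why the loop below is written the way it is, so a minimal, self-contained sketch may help. This is not Cargo's real code: `dfs_preorder` and its toy `u32` node ids are hypothetical, and cycle handling is omitted. It only illustrates the pattern of replacing recursion with an explicit stack of sibling iterators, which is what `remaining_deps` and `DepsFrame::remaining_siblings` do at scale:

```rust
// Toy stand-in for the resolver's iterative DFS. Each stack entry is an
// iterator over the children still to visit at that depth ("siblings").
fn dfs_preorder(root: u32, children: impl Fn(u32) -> Vec<u32>) -> Vec<u32> {
    let mut order = vec![root];
    let mut stack = vec![children(root).into_iter()];
    loop {
        // Peek at the deepest frame; an empty stack means we're done.
        let next = match stack.last_mut() {
            Some(siblings) => siblings.next(),
            None => break,
        };
        match next {
            // Descend: visit the child and push its own sibling iterator.
            Some(child) => {
                order.push(child);
                stack.push(children(child).into_iter());
            }
            // This level is exhausted: pop one frame, i.e. "backtrack".
            None => {
                stack.pop();
            }
        }
    }
    order
}

fn main() {
    // 0 -> {1, 2}; everything else is a leaf.
    let order = dfs_preorder(0, |n| if n == 0 { vec![1, 2] } else { vec![] });
    assert_eq!(order, vec![0, 1, 2]);
}
```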
+ while let Some((just_here_for_the_error_messages, frame)) = + remaining_deps.pop_most_constrained() + { + let (mut parent, (mut dep, candidates, mut features)) = frame; - pub fn query(&self, spec: &str) -> CargoResult<&PackageId> { - let spec = try!(PackageIdSpec::parse(spec).chain_error(|| { - human(format!("invalid package id specification: `{}`", spec)) - })); - let mut ids = self.iter().filter(|p| spec.matches(*p)); - let ret = match ids.next() { - Some(id) => id, - None => return Err(human(format!("package id specification `{}` \ - matched no packages", spec))), - }; - return match ids.next() { - Some(other) => { - let mut msg = format!("There are multiple `{}` packages in \ - your project, and the specification \ - `{}` is ambiguous.\n\ - Please re-run this command \ - with `-p ` where `` is one \ - of the following:", - spec.name(), spec); - let mut vec = vec![ret, other]; - vec.extend(ids); - minimize(&mut msg, vec, &spec); - Err(human(msg)) - } - None => Ok(ret) - }; + // If we spend a lot of time here (we shouldn't in most cases) then give + // a bit of a visual indicator as to what we're doing. + printed.shell_status(config)?; - fn minimize(msg: &mut String, - ids: Vec<&PackageId>, - spec: &PackageIdSpec) { - let mut version_cnt = HashMap::new(); - for id in ids.iter() { - *version_cnt.entry(id.version()).or_insert(0) += 1; - } - for id in ids.iter() { - if version_cnt[id.version()] == 1 { - msg.push_str(&format!("\n {}:{}", spec.name(), - id.version())); - } else { - msg.push_str(&format!("\n {}", - PackageIdSpec::from_package_id(*id))); - } - } - } - } + trace!( + "{}[{}]>{} {} candidates", + parent.name(), + cx.age(), + dep.package_name(), + candidates.len() + ); - pub fn features(&self, pkg: &PackageId) -> Option<&HashSet> { - self.features.get(pkg) - } -} + let just_here_for_the_error_messages = just_here_for_the_error_messages + && past_conflicting_activations + .conflicting(&cx, &dep) + .is_some(); -impl fmt::Debug for Resolve { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - try!(write!(fmt, "graph: {:?}\n", self.graph)); - try!(write!(fmt, "\nfeatures: {{\n")); - for (pkg, features) in &self.features { - try!(write!(fmt, " {}: {:?}\n", pkg, features)); - } - write!(fmt, "}}") - } -} + let mut remaining_candidates = RemainingCandidates::new(&candidates); -#[derive(Clone)] -struct Context { - activations: HashMap<(String, SourceId), Vec>>, - resolve: Resolve, - visited: HashSet, -} + // `conflicting_activations` stores all the reasons we were unable to + // activate candidates. One of these reasons will have to go away for + // backtracking to find a place to restart. It is also the list of + // things to explain in the error message if we fail to resolve. + // + // This is a map of package ID to a reason why that packaged caused a + // conflict for us. + let mut conflicting_activations = ConflictMap::new(); -/// Builds the list of all packages required to build the first argument. 
-pub fn resolve(summary: &Summary, method: &Method, - registry: &mut Registry) -> CargoResult { - trace!("resolve; summary={}", summary.package_id()); - let summary = Rc::new(summary.clone()); - - let cx = Context { - resolve: Resolve::new(summary.package_id().clone()), - activations: HashMap::new(), - visited: HashSet::new(), - }; - let _p = profile::start(format!("resolving: {}", summary.package_id())); - activate_deps_loop(cx, registry, summary, method) -} + // When backtracking we don't fully update `conflicting_activations` + // especially for the cases that we didn't make a backtrack frame in the + // first place. This `backtracked` var stores whether we are continuing + // from a restored backtrack frame so that we can skip caching + // `conflicting_activations` in `past_conflicting_activations` + let mut backtracked = false; -/// Attempts to activate the summary `parent` in the context `cx`. -/// -/// This function will pull dependency summaries from the registry provided, and -/// the dependencies of the package will be determined by the `method` provided. -/// If `parent` was activated, this function returns the dependency frame to -/// iterate through next. -fn activate(cx: &mut Context, - registry: &mut Registry, - parent: Rc, - method: &Method) - -> CargoResult> { - // Dependency graphs are required to be a DAG, so we keep a set of - // packages we're visiting and bail if we hit a dupe. - let id = parent.package_id().clone(); - if !cx.visited.insert(id.clone()) { - return Err(human(format!("cyclic package dependency: package `{}` \ - depends on itself", id))) - } + loop { + let next = remaining_candidates.next( + &mut conflicting_activations, + &cx, + &dep, + parent.package_id(), + ); - // If we're already activated, then that was easy! - if cx.flag_activated(&parent, method) { - cx.visited.remove(&id); - return Ok(None); - } - trace!("activating {}", parent.package_id()); + let (candidate, has_another) = next.ok_or(()).or_else(|_| { + // If we get here then our `remaining_candidates` was just + // exhausted, so `dep` failed to activate. + // + // It's our job here to backtrack, if possible, and find a + // different candidate to activate. If we can't find any + // candidates whatsoever then it's time to bail entirely. + trace!( + "{}[{}]>{} -- no candidates", + parent.name(), + cx.age(), + dep.package_name() + ); - let deps = try!(cx.build_deps(registry, &parent, method)); + // Use our list of `conflicting_activations` to add to our + // global list of past conflicting activations, effectively + // globally poisoning `dep` if `conflicting_activations` ever + // shows up again. We'll use the `past_conflicting_activations` + // below to determine if a dependency is poisoned and skip as + // much work as possible. + // + // If we're only here for the error messages then there's no + // need to try this as this dependency is already known to be + // bad. + // + // As we mentioned above with the `backtracked` variable if this + // local is set to `true` then our `conflicting_activations` may + // not be right, so we can't push into our global cache. 
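The "globally poisoning" idea described in the comments above can be made concrete with a small sketch of a conflict cache. The types here are deliberately simplified stand-ins (`PkgId`, `Conflict`) rather than Cargo's `ConflictCache`/`ConflictMap`; the real cache also keys by `Dependency` and supports age filtering:

```rust
use std::collections::{HashMap, HashSet};

type PkgId = u32;
// A conflict: a set of active packages under which a dependency is known
// to be unresolvable.
type Conflict = HashSet<PkgId>;

#[derive(Default)]
struct SimpleConflictCache {
    by_dep: HashMap<String, Vec<Conflict>>,
}

impl SimpleConflictCache {
    /// Record that `dep` can never be resolved while all of `conflict` is active.
    fn insert(&mut self, dep: &str, conflict: &Conflict) {
        self.by_dep
            .entry(dep.to_string())
            .or_default()
            .push(conflict.clone());
    }

    /// The "poisoning" check: if every member of some recorded conflict is
    /// still active, `dep` is known-bad and its candidates can be skipped.
    fn conflicting<'a>(&'a self, active: &HashSet<PkgId>, dep: &str) -> Option<&'a Conflict> {
        self.by_dep
            .get(dep)?
            .iter()
            .find(|c| c.iter().all(|id| active.contains(id)))
    }
}
```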
+ let mut generalize_conflicting_activations = None; + if !just_here_for_the_error_messages && !backtracked { + past_conflicting_activations.insert(&dep, &conflicting_activations); + if let Some(c) = generalize_conflicting( + &cx, + registry, + &mut past_conflicting_activations, + &parent, + &dep, + &conflicting_activations, + ) { + generalize_conflicting_activations = Some(c); + } + } - Ok(Some(DepsFrame{ - parent: parent, - remaining_siblings: RcVecIter::new(deps), - id: id, - })) -} + match find_candidate( + &cx, + &mut backtrack_stack, + &parent, + backtracked, + generalize_conflicting_activations + .as_ref() + .unwrap_or(&conflicting_activations), + ) { + Some((candidate, has_another, frame)) => { + // Reset all of our local variables used with the + // contents of `frame` to complete our backtrack. + cx = frame.context; + remaining_deps = frame.remaining_deps; + remaining_candidates = frame.remaining_candidates; + parent = frame.parent; + dep = frame.dep; + features = frame.features; + conflicting_activations = frame.conflicting_activations; + backtracked = true; + Ok((candidate, has_another)) + } + None => { + debug!("no candidates found"); + Err(errors::activation_error( + &cx, + registry.registry, + &parent, + &dep, + &conflicting_activations, + &candidates, + config, + )) + } + } + })?; -#[derive(Clone)] -struct RcVecIter { - vec: Rc>, - rest: Range, -} + // If we're only here for the error messages then we know that this + // activation will fail one way or another. To that end if we've got + // more candidates we want to fast-forward to the last one as + // otherwise we'll just backtrack here anyway (helping us to skip + // some work). + if just_here_for_the_error_messages && !backtracked && has_another { + continue; + } -impl RcVecIter { - fn new(vec: Vec) -> RcVecIter { - RcVecIter { - rest: 0..vec.len(), - vec: Rc::new(vec), - } - } - fn cur_index(&self) -> usize { - self.rest.start - 1 - } -} -impl Iterator for RcVecIter where T: Clone { - type Item = (usize, T); - fn next(&mut self) -> Option<(usize, T)> { - self.rest.next().and_then(|i| { - self.vec.get(i).map(|val| (i, val.clone())) - }) - } -} + // We have a `candidate`. Create a `BacktrackFrame` so we can add it + // to the `backtrack_stack` later if activation succeeds. + // + // Note that if we don't actually have another candidate then there + // will be nothing to backtrack to so we skip construction of the + // frame. This is a relatively important optimization as a number of + // the `clone` calls below can be quite expensive, so we avoid them + // if we can. 
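Skipping the `BacktrackFrame` when there is no other candidate is a pure optimization, and its shape is easy to show in isolation. A hedged sketch (the `State` type is hypothetical; in the real code the snapshot is the `Context` plus the various queues, all of which lean on `Rc` so cloning stays cheap):

```rust
// `State` stands in for the resolver's Context/queues bundle.
#[derive(Clone)]
struct State {
    /* Rc-backed data in the real resolver, so `clone` is shallow */
}

// Snapshot only when a retry is actually possible: if no other candidate
// remains, a saved frame could never be popped usefully.
fn maybe_backtrack_frame(state: &State, has_another: bool) -> Option<State> {
    if has_another {
        Some(state.clone())
    } else {
        None
    }
}
```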
+            let backtrack = if has_another {
+                Some(BacktrackFrame {
+                    context: Context::clone(&cx),
+                    remaining_deps: remaining_deps.clone(),
+                    remaining_candidates: remaining_candidates.clone(),
+                    parent: Summary::clone(&parent),
+                    dep: Dependency::clone(&dep),
+                    features: Rc::clone(&features),
+                    conflicting_activations: conflicting_activations.clone(),
+                })
+            } else {
+                None
+            };
 
-#[derive(Clone)]
-struct DepsFrame {
-    parent: Rc<Summary>,
-    remaining_siblings: RcVecIter<DepInfo>,
-    id: PackageId,
-}
+            let pid = candidate.package_id();
+            let opts = ResolveOpts {
+                dev_deps: false,
+                features: Rc::clone(&features),
+                all_features: false,
+                uses_default_features: dep.uses_default_features(),
+            };
+            trace!(
+                "{}[{}]>{} trying {}",
+                parent.name(),
+                cx.age(),
+                dep.package_name(),
+                candidate.version()
+            );
+            let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, opts);
 
-struct BacktrackFrame {
-    context_backup: Context,
-    deps_backup: Vec<DepsFrame>,
-    remaining_candidates: RcVecIter<Rc<Summary>>,
-    parent: Rc<Summary>,
-    dep: Dependency,
-}
+            let successfully_activated = match res {
+                // Success! We've now activated our `candidate` in our context
+                // and we're almost ready to move on. We may want to scrap this
+                // frame in the end if it looks like it's not going to end well,
+                // so figure that out here.
+                Ok(Some((mut frame, dur))) => {
+                    printed.elapsed(dur);
 
-/// Recursively activates the dependencies for `top`, in depth-first order,
-/// backtracking across possible candidates for each dependency as necessary.
-///
-/// If all dependencies can be activated and resolved to a version in the
-/// dependency graph, cx.resolve is returned.
-fn activate_deps_loop(mut cx: Context,
-                      registry: &mut Registry,
-                      top: Rc<Summary>,
-                      top_method: &Method) -> CargoResult<Resolve> {
-    let mut backtrack_stack = Vec::new();
-    let mut remaining_deps = Vec::new();
-    remaining_deps.extend(try!(activate(&mut cx, registry, top, &top_method)));
-    loop {
-        // Retrieves the next dependency to try, from `remaining_deps`.
-        let frame = match remaining_deps.pop() {
-            None => break,
-            Some(mut deps_frame) => {
-                match deps_frame.remaining_siblings.next() {
-                    None => {
-                        cx.visited.remove(&deps_frame.id);
-                        continue
+                    // Our `frame` here is a new package with its own list of
+                    // dependencies. Do a sanity check here of all those
+                    // dependencies by cross-referencing our global
+                    // `past_conflicting_activations`. Recall that map is a
+                    // global cache which lists sets of packages where, when
+                    // activated, the dependency is unresolvable.
+                    //
+                    // If any of our frame's dependencies fit in that bucket,
+                    // aka known unresolvable, then we extend our own set of
+                    // conflicting activations with theirs. We can do this
+                    // because the set of conflicts we found implies the
+                    // dependency can't be activated which implies that we
+                    // ourselves can't be activated, so we know that they
+                    // conflict with us.
+                    let mut has_past_conflicting_dep = just_here_for_the_error_messages;
+                    if !has_past_conflicting_dep {
+                        if let Some(conflicting) = frame
+                            .remaining_siblings
+                            .clone()
+                            .filter_map(|(ref new_dep, _, _)| {
+                                past_conflicting_activations.conflicting(&cx, new_dep)
+                            })
+                            .next()
+                        {
+                            // If one of our deps is known unresolvable
+                            // then we will not succeed.
+                            // However, if we are part of the reason that
+                            // one of our deps conflicts, then
+                            // we can make a stronger statement
+                            // because we will definitely be activated when
+                            // we try our dep.
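The `extend` call that follows can be read on its own. A small sketch with stand-in types (`PkgId` and `Reason` are not Cargo's): the child's conflict set is merged in, minus our own id, since "the child fails while we are active" implies everything else in that set also conflicts with activating us:

```rust
use std::collections::HashMap;

type PkgId = u32;
type Reason = &'static str;
type ConflictMap = HashMap<PkgId, Reason>;

// Merge a known-bad child's conflict set into ours, excluding ourselves:
// we are about to be active by definition, so listing us adds nothing.
fn absorb_child_conflict(ours: &mut ConflictMap, child: &ConflictMap, self_id: PkgId) {
    ours.extend(
        child
            .iter()
            .filter(|&(&p, _)| p != self_id)
            .map(|(&p, &r)| (p, r)),
    );
}
```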
+                            conflicting_activations.extend(
+                                conflicting
+                                    .iter()
+                                    .filter(|&(p, _)| p != &pid)
+                                    .map(|(&p, r)| (p, r.clone())),
+                            );
+
+                            has_past_conflicting_dep = true;
+                        }
+                    }
+                    // If any of `remaining_deps` are known unresolvable with
+                    // us activated, then we extend our own set of
+                    // conflicting activations with theirs and its parent. We can do this
+                    // because the set of conflicts we found implies the
+                    // dependency can't be activated which implies that we
+                    // ourselves are incompatible with that dep, so we know that dep's
+                    // parent conflicts with us.
+                    if !has_past_conflicting_dep {
+                        if let Some(known_related_bad_deps) =
+                            past_conflicting_activations.dependencies_conflicting_with(pid)
+                        {
+                            if let Some((other_parent, conflict)) = remaining_deps
+                                .iter()
+                                // for deps related to us
+                                .filter(|&(_, ref other_dep)| {
+                                    known_related_bad_deps.contains(other_dep)
+                                })
+                                .filter_map(|(other_parent, other_dep)| {
+                                    past_conflicting_activations
+                                        .find_conflicting(&cx, &other_dep, Some(pid))
+                                        .map(|con| (other_parent, con))
+                                })
+                                .next()
+                            {
+                                let rel = conflict.get(&pid).unwrap().clone();
+
+                                // The conflict we found is
+                                // "other dep will not succeed if we are activated."
+                                // We want to add
+                                // "our dep will not succeed if other dep is in remaining_deps"
+                                // but that is not how the cache is set up.
+                                // So we add the less general but much faster,
+                                // "our dep will not succeed if other dep's parent is activated".
+                                conflicting_activations.extend(
+                                    conflict
+                                        .iter()
+                                        .filter(|&(p, _)| p != &pid)
+                                        .map(|(&p, r)| (p, r.clone())),
+                                );
+                                conflicting_activations.insert(other_parent, rel);
+                                has_past_conflicting_dep = true;
+                            }
+                        }
+                    }
+
+                    // OK, if we're in a "known failure" state for this frame we
+                    // may want to skip it altogether though. We don't want to
+                    // skip it though in the case that we're displaying error
+                    // messages to the user!
+                    //
+                    // Here we need to figure out if the user will see if we
+                    // skipped this candidate (if it's known to fail, aka has a
+                    // conflicting dep and we're the last candidate). If we're
+                    // here for the error messages, we can't skip it (but we can
+                    // prune extra work). If we don't have any candidates in our
+                    // backtrack stack then we're the last line of defense, so
+                    // we'll want to present an error message for sure.
+                    let activate_for_error_message = has_past_conflicting_dep && !has_another && {
+                        just_here_for_the_error_messages || {
+                            find_candidate(
+                                &cx,
+                                &mut backtrack_stack.clone(),
+                                &parent,
+                                backtracked,
+                                &conflicting_activations,
+                            )
+                            .is_none()
+                        }
+                    };
+
+                    // If we're only here for the error messages then we know
+                    // one of our candidate deps will fail, meaning we will
+                    // fail and that none of the backtrack frames will find a
+                    // candidate that will help. Consequently let's clean up the
+                    // no longer needed backtrack frames.
+                    if activate_for_error_message {
+                        backtrack_stack.clear();
                     }
-                    Some((cur, (dep, candidates, features))) => {
-                        let parent = deps_frame.parent.clone();
-                        remaining_deps.push(deps_frame);
-                        (parent, cur, dep, candidates, features)
+
+                    // If we don't know for a fact that we'll fail or if we're
+                    // just here for the error message then we push this frame
+                    // onto our list of to-be-resolved, which will generate more
+                    // work for us later on.
+                    //
+                    // Otherwise we're guaranteed to fail and were not here for
+                    // error messages, so we skip work and don't push anything
+                    // onto our stack.
+ frame.just_for_error_messages = has_past_conflicting_dep; + if !has_past_conflicting_dep || activate_for_error_message { + remaining_deps.push(frame); + true + } else { + trace!( + "{}[{}]>{} skipping {} ", + parent.name(), + cx.age(), + dep.package_name(), + pid.version() + ); + false } } - } - }; - let (mut parent, mut cur, mut dep, candidates, features) = frame; - assert!(!remaining_deps.is_empty()); - let method = Method::Required { - dev_deps: false, - features: &features, - uses_default_features: dep.uses_default_features(), - }; + // This candidate's already activated, so there's no extra work + // for us to do. Let's keep going. + Ok(None) => true, - let prev_active = cx.prev_active(&dep).to_vec(); - trace!("{}[{}]>{} {} candidates", parent.name(), cur, dep.name(), - candidates.len()); - trace!("{}[{}]>{} {} prev activations", parent.name(), cur, - dep.name(), prev_active.len()); - - // Filter the set of candidates based on the previously activated - // versions for this dependency. We can actually use a version if it - // precisely matches an activated version or if it is otherwise - // incompatible with all other activated versions. Note that we define - // "compatible" here in terms of the semver sense where if the left-most - // nonzero digit is the same they're considered compatible. - let my_candidates = candidates.iter().filter(|&b| { - prev_active.iter().any(|a| a == b) || - prev_active.iter().all(|a| { - !compatible(a.version(), b.version()) - }) - }).cloned().collect(); + // We failed with a super fatal error (like a network error), so + // bail out as quickly as possible as we can't reliably + // backtrack from errors like these + Err(ActivateError::Fatal(e)) => return Err(e), - // Alright, for each candidate that's gotten this far, it meets the - // following requirements: - // - // 1. The version matches the dependency requirement listed for this - // package - // 2. There are no activated versions for this package which are - // semver-compatible, or there's an activated version which is - // precisely equal to `candidate`. - // - // This means that we're going to attempt to activate each candidate in - // turn. We could possibly fail to activate each candidate, so we try - // each one in turn. - let mut remaining_candidates = RcVecIter::new(my_candidates); - let candidate = match remaining_candidates.next() { - Some((_, candidate)) => { - // We have a candidate. Add an entry to the `backtrack_stack` so - // we can try the next one if this one fails. - backtrack_stack.push(BacktrackFrame { - context_backup: cx.clone(), - deps_backup: remaining_deps.clone(), - remaining_candidates: remaining_candidates, - parent: parent.clone(), - dep: dep.clone(), - }); - candidate - } - None => { - // This dependency has no valid candidate. Backtrack until we - // find a dependency that does have a candidate to try, and try - // to activate that one. This resets the `remaining_deps` to - // their state at the found level of the `backtrack_stack`. - trace!("{}[{}]>{} -- no candidates", parent.name(), cur, dep.name()); - match find_candidate(&mut backtrack_stack, &mut cx, - &mut remaining_deps, &mut parent, &mut cur, - &mut dep) { - None => return Err(activation_error(&cx, registry, &parent, - &dep, &prev_active, - &candidates)), - Some(candidate) => candidate, + // We failed due to a bland conflict, bah! Record this in our + // frame's list of conflicting activations as to why this + // candidate failed, and then move on. 
+                Err(ActivateError::Conflict(id, reason)) => {
+                    conflicting_activations.insert(id, reason);
+                    false
+                }
+            };
 
-        trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
-               candidate.version());
-        cx.resolve.graph.link(parent.package_id().clone(),
-                              candidate.package_id().clone());
+            // If we've successfully activated then save off the backtrack frame
+            // if one was created, and break out of the inner activation loop
+            // as we're ready to move to the next dependency.
+            if successfully_activated {
+                backtrack_stack.extend(backtrack);
+                break;
+            }
 
-        // If we hit an intransitive dependency then clear out the visitation
-        // list as we can't induce a cycle through transitive dependencies.
-        if !dep.is_transitive() {
-            cx.visited.clear();
+            // We've failed to activate this dependency, oh dear! Our call to
+            // `activate` above may have altered our `cx` local variable, so
+            // restore it back if we've got a backtrack frame.
+            //
+            // If we don't have a backtrack frame then we're just using the `cx`
+            // for error messages anyway so we can live with a little
+            // imprecision.
+            if let Some(b) = backtrack {
+                cx = b.context;
+            }
         }
-        remaining_deps.extend(try!(activate(&mut cx, registry,
-                                            candidate, &method)));
+
+        // Ok phew, that loop was a big one! If we've broken out then we've
+        // successfully activated a candidate. Our stacks are all in place so
+        // we're ready to move on to the next dependency that needs activation,
+        // so loop back to the top of the function here.
     }
-    trace!("resolved: {:?}", cx.resolve);
-    Ok(cx.resolve)
+
+    Ok(cx)
 }
 
-// Searches up `backtrack_stack` until it finds a dependency with remaining
-// candidates. Resets `cx` and `remaining_deps` to that level and returns the
-// next candidate. If all candidates have been exhausted, returns None.
-fn find_candidate(backtrack_stack: &mut Vec<BacktrackFrame>,
-                  cx: &mut Context, remaining_deps: &mut Vec<DepsFrame>,
-                  parent: &mut Rc<Summary>, cur: &mut usize,
-                  dep: &mut Dependency) -> Option<Rc<Summary>> {
-    while let Some(mut frame) = backtrack_stack.pop() {
-        if let Some((_, candidate)) = frame.remaining_candidates.next() {
-            *cx = frame.context_backup.clone();
-            *remaining_deps = frame.deps_backup.clone();
-            *parent = frame.parent.clone();
-            *cur = remaining_deps.last().unwrap().remaining_siblings.cur_index();
-            *dep = frame.dep.clone();
-            backtrack_stack.push(frame);
-            return Some(candidate);
+/// Attempts to activate the summary `candidate` in the context `cx`.
+///
+/// This function will pull dependency summaries from the registry provided, and
+/// the dependencies of the package will be determined by the `opts` provided.
+/// If `candidate` was activated, this function returns the dependency frame to
+/// iterate through next.
+fn activate(
+    cx: &mut Context,
+    registry: &mut RegistryQueryer<'_>,
+    parent: Option<(&Summary, &Dependency)>,
+    candidate: Summary,
+    opts: ResolveOpts,
+) -> ActivateResult<Option<(DepsFrame, Duration)>> {
+    let candidate_pid = candidate.package_id();
+    if let Some((parent, dep)) = parent {
+        let parent_pid = parent.package_id();
+        Rc::make_mut(
+            // add an edge from candidate to parent in the parents graph
+            cx.parents.link(candidate_pid, parent_pid),
+        )
+        // and associate dep with that edge
+        .push(dep.clone());
+        if let Some(public_dependency) = cx.public_dependency.as_mut() {
+            // one tricky part is that `candidate_pid` may already be active and
+            // have public dependencies of its own. So we not only need to mark
+            // `candidate_pid` as visible to its parents but also all of its existing
+            // public dependencies.
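The `Rc::make_mut(cx.parents.link(..))` call just above is a copy-on-write idiom worth spelling out. A sketch under assumed toy types (`ParentsGraph` here is hypothetical, not Cargo's `Graph`): edge payloads live behind `Rc`, so cloning the whole graph for a `BacktrackFrame` is shallow, and only an edge list that is actually mutated after a snapshot gets deep-copied:

```rust
use std::collections::HashMap;
use std::rc::Rc;

type Dep = String;

#[derive(Clone, Default)]
struct ParentsGraph {
    // (child, parent) -> the dependency declarations on that edge.
    edges: HashMap<(u32, u32), Rc<Vec<Dep>>>,
}

impl ParentsGraph {
    fn link(&mut self, from: u32, to: u32) -> &mut Rc<Vec<Dep>> {
        self.edges.entry((from, to)).or_insert_with(Rc::default)
    }
}

fn main() {
    let mut g = ParentsGraph::default();
    Rc::make_mut(g.link(1, 0)).push("a".to_string());
    let snapshot = g.clone(); // shallow: the edge list is shared via Rc
    Rc::make_mut(g.link(1, 0)).push("b".to_string()); // copy-on-write of one list
    assert_eq!(snapshot.edges[&(1, 0)].len(), 1); // snapshot is unaffected
    assert_eq!(g.edges[&(1, 0)].len(), 2);
}
```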
+            let existing_public_deps: Vec<PackageId> = public_dependency
+                .get(&candidate_pid)
+                .iter()
+                .flat_map(|x| x.values())
+                .filter_map(|x| if x.1 { Some(&x.0) } else { None })
+                .chain(&Some(candidate_pid))
+                .cloned()
+                .collect();
+            for c in existing_public_deps {
+                // for each (transitive) parent that can newly see `c`
+                let mut stack = vec![(parent_pid, dep.is_public())];
+                while let Some((p, public)) = stack.pop() {
+                    match public_dependency.entry(p).or_default().entry(c.name()) {
+                        im_rc::hashmap::Entry::Occupied(mut o) => {
+                            // the (transitive) parent can already see something by `c`s name, it had better be `c`.
+                            assert_eq!(o.get().0, c);
+                            if o.get().1 {
+                                // The previous time the parent saw `c`, it was a public dependency.
+                                // So all of its parents already know about `c`
+                                // and we can save some time by stopping now.
+                                continue;
+                            }
+                            if public {
+                                // Mark that `c` has now been seen publicly.
+                                o.insert((c, public));
+                            }
+                        }
+                        im_rc::hashmap::Entry::Vacant(v) => {
+                            // The (transitive) parent does not have anything by `c`s name,
+                            // so we add `c`.
+                            v.insert((c, public));
+                        }
+                    }
+                    // if `candidate_pid` was a private dependency of `p` then `p`'s parents can't see `c` through `p`
+                    if public {
+                        // if it was public, then we add all of `p`'s parents to be checked
+                        for &(grand, ref d) in cx.parents.edges(&p) {
+                            stack.push((grand, d.iter().any(|x| x.is_public())));
+                        }
+                    }
+                }
+            }
         }
     }
-    return None;
-}
 
-#[allow(deprecated)] // connect => join in 1.3
-fn activation_error(cx: &Context,
-                    registry: &mut Registry,
-                    parent: &Summary,
-                    dep: &Dependency,
-                    prev_active: &[Rc<Summary>],
-                    candidates: &[Rc<Summary>]) -> Box<CargoError> {
-    if candidates.len() > 0 {
-        let mut msg = format!("failed to select a version for `{}` \
-                               (required by `{}`):\n\
-                               all possible versions conflict with \
-                               previously selected versions of `{}`",
-                              dep.name(), parent.name(),
-                              dep.name());
-        'outer: for v in prev_active.iter() {
-            for node in cx.resolve.graph.iter() {
-                let edges = match cx.resolve.graph.edges(node) {
-                    Some(edges) => edges,
-                    None => continue,
-                };
-                for edge in edges {
-                    if edge != v.package_id() { continue }
 
-                    msg.push_str(&format!("\n  version {} in use by {}",
-                                          v.version(), edge));
-                    continue 'outer;
-                }
-            }
-            msg.push_str(&format!("\n  version {} in use by ??",
-                                  v.version()));
-        }
 
+    let activated = cx.flag_activated(&candidate, &opts, parent)?;
+
+    let candidate = match registry.replacement_summary(candidate_pid) {
+        Some(replace) => {
+            // Note the `None` for parent here since `[replace]` is a bit wonky
+            // and doesn't activate the same things that `[patch]` typically
+            // does. TBH it basically causes panics in the test suite if
+            // `parent` is passed through here and `[replace]` is otherwise
+            // on life support so it's not critical to fix bugs anyway per se.
+            if cx.flag_activated(replace, &opts, None)? && activated {
+                return Ok(None);
+            }
+            trace!(
+                "activating {} (replacing {})",
+                replace.package_id(),
+                candidate_pid
+            );
+            replace.clone()
+        }
+        None => {
+            if activated {
+                return Ok(None);
+            }
+            trace!("activating {}", candidate_pid);
+            candidate
+        }
+    };
 
-        msg.push_str(&format!("\n  possible versions to select: {}",
-                              candidates.iter()
-                                        .map(|v| v.version())
-                                        .map(|v| v.to_string())
-                                        .collect::<Vec<String>>()
-                                        .connect(", ")));
-
-        return human(msg)
+    let now = Instant::now();
+    let (used_features, deps) =
+        &*registry.build_deps(parent.map(|p| p.0.package_id()), &candidate, &opts)?;
-    }
 
+    // Record what list of features is active for this package.
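The visibility-propagation loop above is dense, so here is a simplified model of it. This sketch uses toy types and deliberately drops one subtlety the real code handles (upgrading an edge that was first seen privately and later publicly), so treat it as an approximation of the idea only: a new package becomes visible to its parent, and keeps bubbling up through ancestors for as long as the connecting edges are public:

```rust
use std::collections::{HashMap, HashSet};

type PkgId = u32;

fn propagate_visibility(
    // pkg -> (parent, edge-is-public) pairs
    parents: &HashMap<PkgId, Vec<(PkgId, bool)>>,
    // pkg -> set of packages it can "see"
    visible: &mut HashMap<PkgId, HashSet<PkgId>>,
    new_pkg: PkgId,
    first_parent: PkgId,
    first_edge_public: bool,
) {
    let mut stack = vec![(first_parent, first_edge_public)];
    while let Some((p, public)) = stack.pop() {
        // Already recorded for `p`: in this simplified model its ancestors
        // were handled when it was first inserted, so stop here.
        if !visible.entry(p).or_default().insert(new_pkg) {
            continue;
        }
        // A private edge stops the bubbling; a public one keeps it going.
        if public {
            for &(grand, grand_public) in parents.get(&p).into_iter().flatten() {
                stack.push((grand, grand_public));
            }
        }
    }
}
```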
+ if !used_features.is_empty() { + Rc::make_mut( + cx.resolve_features + .entry(candidate.package_id()) + .or_insert_with(Rc::default), + ) + .extend(used_features); } - // Once we're all the way down here, we're definitely lost in the - // weeds! We didn't actually use any candidates above, so we need to - // give an error message that nothing was found. - // - // Note that we re-query the registry with a new dependency that - // allows any version so we can give some nicer error reporting - // which indicates a few versions that were actually found. - let msg = format!("no matching package named `{}` found \ - (required by `{}`)\n\ - location searched: {}\n\ - version required: {}", - dep.name(), parent.name(), - dep.source_id(), - dep.version_req()); - let mut msg = msg; - let all_req = semver::VersionReq::parse("*").unwrap(); - let new_dep = dep.clone_inner().set_version_req(all_req).into_dependency(); - let mut candidates = match registry.query(&new_dep) { - Ok(candidates) => candidates, - Err(e) => return e, + let frame = DepsFrame { + parent: candidate, + just_for_error_messages: false, + remaining_siblings: RcVecIter::new(Rc::clone(deps)), }; - candidates.sort_by(|a, b| { - b.version().cmp(a.version()) - }); - if candidates.len() > 0 { - msg.push_str("\nversions found: "); - for (i, c) in candidates.iter().take(3).enumerate() { - if i != 0 { msg.push_str(", "); } - msg.push_str(&c.version().to_string()); - } - if candidates.len() > 3 { - msg.push_str(", ..."); - } - } - - // If we have a path dependency with a locked version, then this may - // indicate that we updated a sub-package and forgot to run `cargo - // update`. In this case try to print a helpful error! - if dep.source_id().is_path() && - dep.version_req().to_string().starts_with("=") && - candidates.len() > 0 { - msg.push_str("\nconsider running `cargo update` to update \ - a path dependency's locked version"); + Ok(Some((frame, now.elapsed()))) +} - } - human(msg) +#[derive(Clone)] +struct BacktrackFrame { + context: Context, + remaining_deps: RemainingDeps, + remaining_candidates: RemainingCandidates, + parent: Summary, + dep: Dependency, + features: FeaturesSet, + conflicting_activations: ConflictMap, } -// Returns if `a` and `b` are compatible in the semver sense. This is a -// commutative operation. -// -// Versions `a` and `b` are compatible if their left-most nonzero digit is the -// same. -fn compatible(a: &semver::Version, b: &semver::Version) -> bool { - if a.major != b.major { return false } - if a.major != 0 { return true } - if a.minor != b.minor { return false } - if a.minor != 0 { return true } - a.patch == b.patch +/// A helper "iterator" used to extract candidates within a current `Context` of +/// a dependency graph. +/// +/// This struct doesn't literally implement the `Iterator` trait (requires a few +/// more inputs) but in general acts like one. Each `RemainingCandidates` is +/// created with a list of candidates to choose from. When attempting to iterate +/// over the list of candidates only *valid* candidates are returned. Validity +/// is defined within a `Context`. +/// +/// Candidates passed to `new` may not be returned from `next` as they could be +/// filtered out, and as they are filtered the causes will be added to `conflicting_prev_active`. 
+#[derive(Clone)]
+struct RemainingCandidates {
+    remaining: RcVecIter<Summary>,
+    // This is an inlined peekable generator.
+    has_another: Option<Summary>,
+}
 
-// Returns a pair of (feature dependencies, all used features)
-//
-// The feature dependencies map is a mapping of package name to list of features
-// enabled. Each package should be enabled, and each package should have the
-// specified set of features enabled.
-//
-// The all used features set is the set of features which this local package had
-// enabled, which is later used when compiling to instruct the code what
-// features were enabled.
-fn build_features(s: &Summary, method: &Method)
-                  -> CargoResult<(HashMap<String, Vec<String>>, HashSet<String>)> {
-    let mut deps = HashMap::new();
-    let mut used = HashSet::new();
-    let mut visited = HashSet::new();
-    match *method {
-        Method::Everything => {
-            for key in s.features().keys() {
-                try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
+impl RemainingCandidates {
+    fn new(candidates: &Rc<Vec<Summary>>) -> RemainingCandidates {
+        RemainingCandidates {
+            remaining: RcVecIter::new(Rc::clone(candidates)),
+            has_another: None,
+        }
+    }
+
+    /// Attempts to find another candidate to check from this list.
+    ///
+    /// This method will attempt to move this iterator forward, returning a
+    /// candidate that's possible to activate. The `cx` argument is the current
+    /// context which determines validity for candidates returned, and the `dep`
+    /// is the dependency listing that we're activating for.
+    ///
+    /// If successful a `(Candidate, bool)` pair will be returned. The
+    /// `Candidate` is the candidate to attempt to activate, and the `bool` is
+    /// an indicator of whether there are remaining candidates to try, or if
+    /// we've reached the end of iteration.
+    ///
+    /// If we've reached the end of the iterator here then `None` will be
+    /// returned. `conflicting_prev_active` will have been filled with a map of
+    /// package ID to conflict reason, where each package ID caused a candidate
+    /// to be filtered out from the original list for the reason listed.
+    fn next(
+        &mut self,
+        conflicting_prev_active: &mut ConflictMap,
+        cx: &Context,
+        dep: &Dependency,
+        parent: PackageId,
+    ) -> Option<(Summary, bool)> {
+        'main: for b in self.remaining.by_ref() {
+            let b_id = b.package_id();
+            // The `links` key in the manifest dictates that there's only one
+            // package in a dependency graph, globally, with that particular
+            // `links` key. If this candidate links to something that's already
+            // linked to by a different package then we've gotta skip this.
+            if let Some(link) = b.links() {
+                if let Some(&a) = cx.links.get(&link) {
+                    if a != b_id {
+                        conflicting_prev_active
+                            .entry(a)
+                            .or_insert_with(|| ConflictReason::Links(link));
+                        continue;
+                    }
+                }
             }
-            for dep in s.dependencies().iter().filter(|d| d.is_optional()) {
-                try!(add_feature(s, dep.name(), &mut deps, &mut used,
-                                 &mut visited));
+
+            // Otherwise the condition for being a valid candidate relies on
+            // semver. Cargo dictates that you can't duplicate multiple
+            // semver-compatible versions of a crate. For example we can't
+            // simultaneously activate `foo 1.0.2` and `foo 1.2.0`. We can,
+            // however, activate `1.0.2` and `2.0.0`.
+            //
+            // Here we throw out our candidate if it's *compatible*, yet not
+            // equal, to all previously activated versions.
+            if let Some((a, _)) = cx.activations.get(&b_id.as_activations_key()) {
+                if *a != b {
+                    conflicting_prev_active
+                        .entry(a.package_id())
+                        .or_insert(ConflictReason::Semver);
+                    continue;
+                }
             }
-        }
-        Method::Required { features: requested_features, .. } => {
-            for feat in requested_features.iter() {
-                try!(add_feature(s, feat, &mut deps, &mut used, &mut visited));
+            // We may still have to reject due to a public dependency conflict. If any of our
+            // ancestors that can see us already knows about a different crate with this name then
+            // we have to reject this candidate. Additionally this candidate may already have been
+            // activated and have public dependants of its own,
+            // all of which also need to be checked the same way.
+            if let Some(public_dependency) = cx.public_dependency.as_ref() {
+                let existing_public_deps: Vec<PackageId> = public_dependency
+                    .get(&b_id)
+                    .iter()
+                    .flat_map(|x| x.values())
+                    .filter_map(|x| if x.1 { Some(&x.0) } else { None })
+                    .chain(&Some(b_id))
+                    .cloned()
+                    .collect();
+                for t in existing_public_deps {
+                    // for each (transitive) parent that can newly see `t`
+                    let mut stack = vec![(parent, dep.is_public())];
+                    while let Some((p, public)) = stack.pop() {
+                        // TODO: don't look at the same thing more than once
+                        if let Some(o) = public_dependency.get(&p).and_then(|x| x.get(&t.name())) {
+                            if o.0 != t {
+                                // the (transitive) parent can already see a different version by `t`s name.
+                                // So, adding `b` will cause `p` to have a public dependency conflict on `t`.
+                                conflicting_prev_active.insert(p, ConflictReason::PublicDependency);
+                                continue 'main;
+                            }
+                        }
+                        // if `b` was a private dependency of `p` then `p`'s parents can't see `t` through `p`
+                        if public {
+                            // if it was public, then we add all of `p`'s parents to be checked
+                            for &(grand, ref d) in cx.parents.edges(&p) {
+                                stack.push((grand, d.iter().any(|x| x.is_public())));
+                            }
+                        }
+                    }
+                }
             }
-        }
-    }
-    match *method {
-        Method::Everything |
-        Method::Required { uses_default_features: true, .. } => {
-            if s.features().get("default").is_some() {
-                try!(add_feature(s, "default", &mut deps, &mut used,
-                                 &mut visited));
+
+            // Well if we made it this far then we've got a valid dependency. We
+            // want this iterator to be inherently "peekable" so we don't
+            // necessarily return the item just yet. Instead we stash it away to
+            // get returned later, and if we replaced something then that was
+            // actually the candidate to try first so we return that.
+            if let Some(r) = mem::replace(&mut self.has_another, Some(b)) {
+                return Some((r, true));
             }
         }
-        Method::Required { uses_default_features: false, .. } => {}
+
+        // Alright we've entirely exhausted our list of candidates. If we've got
+        // something stashed away return that here (also indicating that there's
+        // nothing else).
+        self.has_another.take().map(|r| (r, false))
     }
-    return Ok((deps, used));
-
-    fn add_feature(s: &Summary, feat: &str,
-                   deps: &mut HashMap<String, Vec<String>>,
-                   used: &mut HashSet<String>,
-                   visited: &mut HashSet<String>) -> CargoResult<()> {
-        if feat.is_empty() { return Ok(()) }
-
-        // If this feature is of the form `foo/bar`, then we just lookup package
-        // `foo` and enable its feature `bar`. Otherwise this feature is of the
-        // form `foo` and we need to recurse to enable the feature `foo` for our
-        // own package, which may end up enabling more features or just enabling
-        // a dependency.
-        let mut parts = feat.splitn(2, '/');
-        let feat_or_package = parts.next().unwrap();
-        match parts.next() {
-            Some(feat) => {
-                let package = feat_or_package;
-                used.insert(package.to_string());
-                deps.entry(package.to_string())
-                    .or_insert(Vec::new())
-                    .push(feat.to_string());
-            }
-            None => {
-                let feat = feat_or_package;
-                if !visited.insert(feat.to_string()) {
-                    return Err(human(format!("Cyclic feature dependency: \
-                                              feature `{}` depends on itself",
-                                             feat)))
+}
+
+/// Attempts to find a new conflict that allows a `find_candidate` further back than the input one.
+/// It will add the new conflict to the cache if one is found.
+///
+/// Panics if the input conflict is not all active in `cx`.
+fn generalize_conflicting(
+    cx: &Context,
+    registry: &mut RegistryQueryer<'_>,
+    past_conflicting_activations: &mut conflict_cache::ConflictCache,
+    parent: &Summary,
+    dep: &Dependency,
+    conflicting_activations: &ConflictMap,
+) -> Option<ConflictMap> {
+    if conflicting_activations.is_empty() {
+        return None;
+    }
+    // We need to determine the `ContextAge` that this `conflicting_activations` will jump to, and why.
+    let (backtrack_critical_age, backtrack_critical_id) = conflicting_activations
+        .keys()
+        .map(|&c| (cx.is_active(c).expect("not currently active!?"), c))
+        .max()
+        .unwrap();
+    let backtrack_critical_reason: ConflictReason =
+        conflicting_activations[&backtrack_critical_id].clone();
+
+    if cx
+        .parents
+        .is_path_from_to(&parent.package_id(), &backtrack_critical_id)
+    {
+        // We are a descendant of the trigger of the problem.
+        // The best generalization of this is to let things bubble up
+        // and let `backtrack_critical_id` figure this out.
+        return None;
+    }
+    // What parents does that critical activation have?
+    for (critical_parent, critical_parents_deps) in
+        cx.parents.edges(&backtrack_critical_id).filter(|(p, _)| {
+            // it will only help backjump further if it is older than the critical_age
+            cx.is_active(*p).expect("parent not currently active!?") < backtrack_critical_age
+        })
+    {
+        for critical_parents_dep in critical_parents_deps.iter() {
+            // A dep is equivalent to one of the things it can resolve to.
+            // Thus, if all the things it can resolve to have already been determined
+            // to be conflicting, then we can just say that we conflict with the parent.
+            if let Some(others) = registry
+                .query(critical_parents_dep)
+                .expect("an already used dep now error!?")
+                .iter()
+                .rev() // the last one to be tried is the least likely to be in the cache, so start with that.
+                .map(|other| {
+                    past_conflicting_activations
+                        .find(
+                            dep,
+                            &|id| {
+                                if id == other.package_id() {
+                                    // we are imagining that we used other instead
+                                    Some(backtrack_critical_age)
+                                } else {
+                                    cx.is_active(id)
+                                }
+                            },
+                            Some(other.package_id()),
+                            // we only care about things that are newer than critical_age
+                            backtrack_critical_age,
+                        )
+                        .map(|con| (other.package_id(), con))
+                })
+                .collect::<Option<Vec<_>>>()
+            {
+                let mut con = conflicting_activations.clone();
+                // It is always valid to combine previously inserted conflicts.
+                // A, B are both known bad states each that can never be activated.
+                // A + B is redundant but can't be activated, as if
+                // A + B is active then A is active and we know that is not ok.
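The combine-and-substitute argument in the comment above translates into a few lines of map surgery, shown in the code that follows. A sketch with stand-in types (not Cargo's real `ConflictMap` machinery): union the per-candidate conflicts with the base conflict, remove the candidates themselves, and blame the parent dependency edge instead:

```rust
use std::collections::HashMap;

type PkgId = u32;
type Reason = &'static str;
type ConflictMap = HashMap<PkgId, Reason>;

fn substitute_parent(
    base: &ConflictMap,                     // the conflict being generalized
    per_candidate: &[(PkgId, ConflictMap)], // every candidate is known-bad
    critical_parent: PkgId,
    reason: Reason,
) -> ConflictMap {
    let mut con = base.clone();
    // A union of always-bad states is still an always-bad state.
    for (_, other) in per_candidate {
        con.extend(other.iter().map(|(&id, &re)| (id, re)));
    }
    // The dep is equivalent to "one of its candidates", so the candidates
    // can be replaced by the parent that introduced the dep.
    for (candidate_id, _) in per_candidate {
        con.remove(candidate_id);
    }
    con.insert(critical_parent, reason);
    con
}
```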
+                for (_, other) in &others {
+                    con.extend(other.iter().map(|(&id, re)| (id, re.clone())));
+                }
-                used.insert(feat.to_string());
-                match s.features().get(feat) {
-                    Some(recursive) => {
-                        for f in recursive {
-                            try!(add_feature(s, f, deps, used, visited));
-                        }
-                    }
-                    None => {
-                        deps.entry(feat.to_string()).or_insert(Vec::new());
-                    }
-                }
-                visited.remove(&feat.to_string());
+                // Now that we have this combined conflict, we can do a substitution:
+                // A dep is equivalent to one of the things it can resolve to.
+                // So we can remove all the things that it resolves to and replace with the parent.
+                for (other_id, _) in &others {
+                    con.remove(other_id);
+                }
+                con.insert(*critical_parent, backtrack_critical_reason);
+
+                if cfg!(debug_assertions) {
+                    // the entire point is to find an older conflict, so let's make sure we did
+                    let new_age = con
+                        .keys()
+                        .map(|&c| cx.is_active(c).expect("not currently active!?"))
+                        .max()
+                        .unwrap();
+                    assert!(
+                        new_age < backtrack_critical_age,
+                        "new_age {} < backtrack_critical_age {}",
+                        new_age,
+                        backtrack_critical_age
+                    );
+                }
+                past_conflicting_activations.insert(dep, &con);
+                return Some(con);
+            }
+        }
-        Ok(())
     }
+    None
+}
 
-impl Context {
-    // Activate this summary by inserting it into our list of known activations.
+/// Looks through the states in `backtrack_stack` for dependencies with
+/// remaining candidates. For each one, also checks if rolling back
+/// could change the outcome of the failed resolution that caused backtracking
+/// in the first place. Namely, if we've backtracked past the parent of the
+/// failed dep, or any of the packages flagged as giving us trouble in
+/// `conflicting_activations`.
+///
+/// See the pull request that introduced this backjumping logic for several
+/// more detailed explanations of it.
+fn find_candidate(
+    cx: &Context,
+    backtrack_stack: &mut Vec<BacktrackFrame>,
+    parent: &Summary,
+    backtracked: bool,
+    conflicting_activations: &ConflictMap,
+) -> Option<(Summary, bool, BacktrackFrame)> {
+    // When we're calling this method we know that `parent` failed to
+    // activate. That means that some dependency failed to get resolved for
+    // whatever reason. Normally, that means that all of those reasons
+    // (plus maybe some extras) are listed in `conflicting_activations`.
     //
-    // Returns if this summary with the given method is already activated.
-    fn flag_activated(&mut self,
-                      summary: &Rc<Summary>,
-                      method: &Method) -> bool {
-        let id = summary.package_id();
-        let key = (id.name().to_string(), id.source_id().clone());
-        let prev = self.activations.entry(key).or_insert(Vec::new());
-        if !prev.iter().any(|c| c == summary) {
-            self.resolve.graph.add(id.clone(), &[]);
-            prev.push(summary.clone());
-            return false
-        }
-        debug!("checking if {} is already activated", summary.package_id());
-        let (features, use_default) = match *method {
-            Method::Required { features, uses_default_features, .. } => {
-                (features, uses_default_features)
-            }
-            Method::Everything => return false,
-        };
+    // The abnormal situations are things that do not put all of the reasons in `conflicting_activations`:
+    // If we backtracked we do not know how our `conflicting_activations` related to
+    // the cause of that backtrack, so we do not update it.
+    // If we had a PublicDependency conflict, then we do not yet have a compact way to
+    // represent all the parts of the problem, so `conflicting_activations` is incomplete.
+    let age = if !backtracked
+        && !conflicting_activations
+            .values()
+            .any(|c| *c == ConflictReason::PublicDependency)
+    {
+        // we don't have abnormal situations. So we can ask `cx` for how far back we need to go.
+        let a = cx.is_conflicting(Some(parent.package_id()), conflicting_activations);
+        // If the `conflicting_activations` does not apply to `cx`, then something went very wrong
+        // in building it. But we will just fall back to laboriously trying all possibilities, which
+        // will give us the correct answer, so only `assert` if there is a developer to debug it.
+        debug_assert!(a.is_some());
+        a
+    } else {
+        None
+    };
+
+    while let Some(mut frame) = backtrack_stack.pop() {
+        let next = frame.remaining_candidates.next(
+            &mut frame.conflicting_activations,
+            &frame.context,
+            &frame.dep,
+            frame.parent.package_id(),
+        );
+        let (candidate, has_another) = match next {
+            Some(pair) => pair,
+            None => continue,
+        };
-        let has_default_feature = summary.features().contains_key("default");
-        match self.resolve.features(id) {
-            Some(prev) => {
-                features.iter().all(|f| prev.contains(f)) &&
-                    (!use_default || prev.contains("default") ||
-                     !has_default_feature)
+        // If all members of `conflicting_activations` are still
+        // active in this backup we know that we're guaranteed to not actually
+        // make any progress. As a result if we hit this condition we can
+        // completely skip this backtrack frame and move on to the next.
+        if let Some(age) = age {
+            if frame.context.age() > age {
+                trace!(
+                    "{} = \"{}\" skip as not solving {}: {:?}",
+                    frame.dep.package_name(),
+                    frame.dep.version_req(),
+                    parent.package_id(),
+                    conflicting_activations
+                );
+                // above we use `cx` to determine that this is still going to be conflicting.
+                // but let's just double check.
+                debug_assert!(
+                    frame
+                        .context
+                        .is_conflicting(Some(parent.package_id()), conflicting_activations)
+                        == Some(age)
+                );
+                continue;
+            } else {
+                // above we use `cx` to determine that this is not going to be conflicting.
+                // but let's just double check.
+                debug_assert!(frame
+                    .context
+                    .is_conflicting(Some(parent.package_id()), conflicting_activations)
+                    .is_none());
+            }
-            None => features.len() == 0 && (!use_default || !has_default_feature)
         }
-    }
 
-    fn build_deps(&mut self, registry: &mut Registry,
-                  parent: &Summary,
-                  method: &Method) -> CargoResult<Vec<DepInfo>> {
-        // First, figure out our set of dependencies based on the requested set
-        // of features. This also calculates what features we're going to enable
-        // for our own dependencies.
-        let deps = try!(self.resolve_features(parent, method));
-
-        // Next, transform all dependencies into a list of possible candidates
-        // which can satisfy that dependency.
-        let mut deps = try!(deps.into_iter().map(|(dep, features)| {
-            let mut candidates = try!(registry.query(&dep));
-            // When we attempt versions for a package, we'll want to start at
-            // the maximum version and work our way down.
-            candidates.sort_by(|a, b| {
-                b.version().cmp(a.version())
-            });
-            let candidates = candidates.into_iter().map(Rc::new).collect();
-            Ok((dep, candidates, features))
-        }).collect::<CargoResult<Vec<_>>>());
-
-        // Attempt to resolve dependencies with fewer candidates before trying
-        // dependencies with more candidates. This way if the dependency with
-        // only one candidate can't be resolved we don't have to do a bunch of
-        // work before we figure that out.
-        deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| {
-            a.len().cmp(&b.len())
-        });
-
-        Ok(deps)
+        return Some((candidate, has_another, frame));
     }
+    None
+}
 
-    fn prev_active(&self, dep: &Dependency) -> &[Rc<Summary>] {
-        let key = (dep.name().to_string(), dep.source_id().clone());
-        self.activations.get(&key).map(|v| &v[..]).unwrap_or(&[])
+fn check_cycles(resolve: &Resolve) -> CargoResult<()> {
+    // Sort packages to produce user-friendly deterministic errors.
+    let mut all_packages: Vec<_> = resolve.iter().collect();
+    all_packages.sort_unstable();
+    let mut checked = HashSet::new();
+    for pkg in all_packages {
+        if !checked.contains(&pkg) {
+            visit(resolve, pkg, &mut HashSet::new(), &mut checked)?
+        }
     }
+    return Ok(());
 
-    #[allow(deprecated)] // connect => join in 1.3
-    fn resolve_features(&mut self, parent: &Summary, method: &Method)
-                        -> CargoResult<Vec<(Dependency, Vec<String>)>> {
-        let dev_deps = match *method {
-            Method::Everything => true,
-            Method::Required { dev_deps, .. } => dev_deps,
-        };
-
-        // First, filter by dev-dependencies
-        let deps = parent.dependencies();
-        let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
+    fn visit(
+        resolve: &Resolve,
+        id: PackageId,
+        visited: &mut HashSet<PackageId>,
+        checked: &mut HashSet<PackageId>,
+    ) -> CargoResult<()> {
+        // See if we visited ourselves
+        if !visited.insert(id) {
+            failure::bail!(
+                "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}",
+                id,
+                errors::describe_path(&resolve.path_to_top(&id))
+            );
+        }
 
-        let (mut feature_deps, used_features) = try!(build_features(parent,
-                                                                    method));
-        let mut ret = Vec::new();
+        // If we've already checked this node no need to recurse again as we'll
+        // just conclude the same thing as last time, so we only execute the
+        // recursive step if we successfully insert into `checked`.
+        //
+        // Note that if we hit an intransitive dependency then we clear out the
+        // visitation list as we can't induce a cycle through transitive
+        // dependencies.
+        if checked.insert(id) {
+            for (dep, listings) in resolve.deps_not_replaced(id) {
+                let is_transitive = listings.iter().any(|d| d.is_transitive());
+                let mut empty = HashSet::new();
+                let visited = if is_transitive {
+                    &mut *visited
+                } else {
+                    &mut empty
+                };
+                visit(resolve, dep, visited, checked)?;
 
-        // Next, sanitize all requested features by whitelisting all the
-        // requested features that correspond to optional dependencies
-        for dep in deps {
-            // weed out optional dependencies, but not those required
-            if dep.is_optional() && !feature_deps.contains_key(dep.name()) {
-                continue
-            }
-            let mut base = feature_deps.remove(dep.name()).unwrap_or(vec![]);
-            for feature in dep.features().iter() {
-                base.push(feature.clone());
-                if feature.contains("/") {
-                    return Err(human(format!("features in dependencies \
-                                              cannot enable features in \
-                                              other dependencies: `{}`",
-                                             feature)));
+                if let Some(id) = resolve.replacement(dep) {
+                    visit(resolve, id, visited, checked)?;
                 }
             }
-            ret.push((dep.clone(), base));
         }
 
-        // All features can only point to optional dependencies, in which case
-        // they should have all been weeded out by the above iteration. Any
-        // remaining features are bugs in that the package does not actually
-        // have those features.
-        if feature_deps.len() > 0 {
-            let unknown = feature_deps.keys().map(|s| &s[..])
-                                       .collect::<Vec<&str>>();
-            if unknown.len() > 0 {
-                let features = unknown.connect(", ");
-                return Err(human(format!("Package `{}` does not have these \
-                                          features: `{}`", parent.package_id(),
-                                         features)))
-            }
-        }
+        // Ok, we're done, no longer visiting our node any more
+        visited.remove(&id);
+        Ok(())
+    }
+}
 
-        // Record what list of features is active for this package.
-        if used_features.len() > 0 {
-            let pkgid = parent.package_id();
-            self.resolve.features.entry(pkgid.clone())
-                .or_insert(HashSet::new())
-                .extend(used_features);
-        }
-
-        Ok(ret)
-    }
-}
+/// Checks that packages are unique when written to the lock file.
+///
+/// When writing package IDs to the lock file, we apply lossy encoding. In
+/// particular, we don't store paths of path dependencies. That means that
+/// *different* packages may collide in the lock file, hence this check.
+fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> {
+    let mut unique_pkg_ids = HashMap::new();
+    let state = encode::EncodeState::new(resolve);
+    for pkg_id in resolve.iter() {
+        let encodable_pkg_id = encode::encodable_package_id(pkg_id, &state);
+        if let Some(prev_pkg_id) = unique_pkg_ids.insert(encodable_pkg_id, pkg_id) {
+            failure::bail!(
+                "package collision in the lockfile: packages {} and {} are different, \
+                 but only one can be written to lockfile unambiguously",
+                prev_pkg_id,
+                pkg_id
+            )
+        }
+    }
+    Ok(())
+}
diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs
new file mode 100644
index 00000000000..7f210a1e812
--- /dev/null
+++ b/src/cargo/core/resolver/resolve.rs
@@ -0,0 +1,394 @@
+use std::borrow::Borrow;
+use std::collections::{HashMap, HashSet};
+use std::fmt;
+use std::iter::FromIterator;
+
+use url::Url;
+
+use crate::core::dependency::Kind;
+use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target};
+use crate::util::errors::CargoResult;
+use crate::util::{Graph, Platform};
+
+use super::encode::Metadata;
+
+/// Represents a fully-resolved package dependency graph. Each node in the graph
+/// is a package and edges represent dependencies between packages.
+///
+/// Each instance of `Resolve` also understands the full set of features used
+/// for each package.
+#[derive(PartialEq)]
+pub struct Resolve {
+    /// A graph, whose vertices are packages and edges are dependency specifications
+    /// from `Cargo.toml`. We need a `Vec` here because the same package
+    /// might be present in both `[dependencies]` and `[build-dependencies]`.
+    graph: Graph<PackageId, Vec<Dependency>>,
+    /// Replacements from the `[replace]` table.
+    replacements: HashMap<PackageId, PackageId>,
+    /// Inverted version of `replacements`.
+    reverse_replacements: HashMap<PackageId, PackageId>,
+    /// An empty `HashMap` to avoid creating a new `HashMap` for every package
+    /// that does not have any features, and to avoid using `Option` to
+    /// simplify the API.
+    empty_features: HashMap<String, Option<Platform>>,
+    /// Features enabled for a given package.
+    features: HashMap<PackageId, HashMap<String, Option<Platform>>>,
+    /// Checksum for each package. A SHA256 hash of the `.crate` file used to
+    /// validate the correct crate file is used. This is `None` for sources
+    /// that do not use `.crate` files, like path or git dependencies.
+    checksums: HashMap<PackageId, Option<String>>,
+    /// "Unknown" metadata. This is a collection of extra, unrecognized data
+    /// found in the `[metadata]` section of `Cargo.lock`, preserved for
+    /// forwards compatibility.
+    metadata: Metadata,
+    /// `[patch]` entries that did not match anything, preserved in
+    /// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused
Tracking unused + /// patches helps prevent Cargo from being forced to re-update the + /// registry every time it runs, and keeps the resolve in a locked state + /// so it doesn't re-resolve the unused entries. + unused_patches: Vec, + /// A map from packages to a set of their public dependencies + public_dependencies: HashMap>, + /// Version of the `Cargo.lock` format, see + /// `cargo::core::resolver::encode` for more. + version: ResolveVersion, +} + +/// A version to indicate how a `Cargo.lock` should be serialized. Currently V1 +/// is the default and dates back to the origins of Cargo. A V2 is currently +/// being proposed which provides a much more compact representation of +/// dependency edges and also moves checksums out of `[metadata]`. +/// +/// It's theorized that we can add more here over time to track larger changes +/// to the `Cargo.lock` format, but we've yet to see how that strategy pans out. +#[derive(PartialEq, Clone, Debug)] +pub enum ResolveVersion { + V1, + V2, +} + +impl Resolve { + pub fn new( + graph: Graph>, + replacements: HashMap, + features: HashMap>>, + checksums: HashMap>, + metadata: Metadata, + unused_patches: Vec, + version: ResolveVersion, + ) -> Resolve { + let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect(); + let public_dependencies = graph + .iter() + .map(|p| { + let public_deps = graph + .edges(p) + .filter(|(_, deps)| { + deps.iter() + .any(|d| d.kind() == Kind::Normal && d.is_public()) + }) + .map(|(dep_package, _)| *dep_package) + .collect::>(); + + (*p, public_deps) + }) + .collect(); + + Resolve { + graph, + replacements, + features, + checksums, + metadata, + unused_patches, + empty_features: HashMap::new(), + reverse_replacements, + public_dependencies, + version, + } + } + + /// Resolves one of the paths from the given dependent package up to + /// the root. + pub fn path_to_top<'a>(&'a self, pkg: &'a PackageId) -> Vec<&'a PackageId> { + self.graph.path_to_top(pkg) + } + + pub fn register_used_patches(&mut self, patches: &HashMap>) { + for summary in patches.values().flat_map(|v| v) { + if self.iter().any(|id| id == summary.package_id()) { + continue; + } + self.unused_patches.push(summary.package_id()); + } + } + + pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { + // Given a previous instance of resolve, it should be forbidden to ever + // have a checksums which *differ*. If the same package ID has differing + // checksums, then something has gone wrong such as: + // + // * Something got seriously corrupted + // * A "mirror" isn't actually a mirror as some changes were made + // * A replacement source wasn't actually a replacement, some changes + // were made + // + // In all of these cases, we want to report an error to indicate that + // something is awry. Normal execution (esp just using crates.io) should + // never run into this. + for (id, cksum) in previous.checksums.iter() { + if let Some(mine) = self.checksums.get(id) { + if mine == cksum { + continue; + } + + // If the previous checksum wasn't calculated, the current + // checksum is `Some`. This may indicate that a source was + // erroneously replaced or was replaced with something that + // desires stronger checksum guarantees than can be afforded + // elsewhere. 
+ if cksum.is_none() { + failure::bail!( + "\ +checksum for `{}` was not previously calculated, but a checksum could now \ +be calculated + +this could be indicative of a few possible situations: + + * the source `{}` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt +", + id, + id.source_id() + ) + + // If our checksum hasn't been calculated, then it could mean + // that future Cargo figured out how to checksum something or + // more realistically we were overridden with a source that does + // not have checksums. + } else if mine.is_none() { + failure::bail!( + "\ +checksum for `{}` could not be calculated, but a checksum is listed in \ +the existing lock file + +this could be indicative of a few possible situations: + + * the source `{}` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `{0}` is the same as when the lockfile was generated +", + id, + id.source_id() + ) + + // If the checksums aren't equal, and neither is None, then they + // must both be Some, in which case the checksum now differs. + // That's quite bad! + } else { + failure::bail!( + "\ +checksum for `{}` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g., a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `{0}` is the same as when the lockfile was generated +", + id + ); + } + } + } + + // Be sure to just copy over any unknown metadata. + self.metadata = previous.metadata.clone(); + + // The goal of Cargo is largely to preserve the encoding of + // `Cargo.lock` that it finds on the filesystem. Sometimes `Cargo.lock` + // changes are in the works where they haven't been set as the default + // yet but will become the default soon. We want to preserve those + // features if we find them. + // + // For this reason if the previous `Cargo.lock` is from the future, or + // otherwise it looks like it's produced with future features we + // understand, then the new resolve will be encoded with the same + // version. Note that new instances of `Resolve` always use the default + // encoding, and this is where we switch it to a future encoding if the + // future encoding isn't yet the default. 
+ if previous.version.from_the_future() { + self.version = previous.version.clone(); + } + + Ok(()) + } + + pub fn contains(&self, k: &Q) -> bool + where + PackageId: Borrow, + Q: Ord + Eq, + { + self.graph.contains(k) + } + + pub fn sort(&self) -> Vec { + self.graph.sort() + } + + pub fn iter<'a>(&'a self) -> impl Iterator + 'a { + self.graph.iter().cloned() + } + + pub fn deps(&self, pkg: PackageId) -> impl Iterator { + self.deps_not_replaced(pkg) + .map(move |(id, deps)| (self.replacement(id).unwrap_or(id), deps)) + } + + pub fn deps_not_replaced( + &self, + pkg: PackageId, + ) -> impl Iterator { + self.graph + .edges(&pkg) + .map(|(id, deps)| (*id, deps.as_slice())) + } + + pub fn replacement(&self, pkg: PackageId) -> Option { + self.replacements.get(&pkg).cloned() + } + + pub fn replacements(&self) -> &HashMap { + &self.replacements + } + + pub fn features(&self, pkg: PackageId) -> &HashMap> { + self.features.get(&pkg).unwrap_or(&self.empty_features) + } + + pub fn is_public_dep(&self, pkg: PackageId, dep: PackageId) -> bool { + self.public_dependencies + .get(&pkg) + .map(|public_deps| public_deps.contains(&dep)) + .unwrap_or_else(|| panic!("Unknown dependency {:?} for package {:?}", dep, pkg)) + } + + pub fn features_sorted(&self, pkg: PackageId) -> Vec<&str> { + let mut v = Vec::from_iter(self.features(pkg).iter().map(|(s, _)| s.as_ref())); + v.sort_unstable(); + v + } + + pub fn query(&self, spec: &str) -> CargoResult { + PackageIdSpec::query_str(spec, self.iter()) + } + + pub fn unused_patches(&self) -> &[PackageId] { + &self.unused_patches + } + + pub fn checksums(&self) -> &HashMap> { + &self.checksums + } + + pub fn metadata(&self) -> &Metadata { + &self.metadata + } + + pub fn extern_crate_name( + &self, + from: PackageId, + to: PackageId, + to_target: &Target, + ) -> CargoResult { + let deps = if from == to { + &[] + } else { + self.dependencies_listed(from, to) + }; + + let crate_name = to_target.crate_name(); + let mut names = deps.iter().map(|d| { + d.explicit_name_in_toml() + .map(|s| s.as_str().replace("-", "_")) + .unwrap_or_else(|| crate_name.clone()) + }); + let name = names.next().unwrap_or_else(|| crate_name.clone()); + for n in names { + failure::ensure!( + n == name, + "the crate `{}` depends on crate `{}` multiple times with different names", + from, + to, + ); + } + Ok(name) + } + + fn dependencies_listed(&self, from: PackageId, to: PackageId) -> &[Dependency] { + // We've got a dependency on `from` to `to`, but this dependency edge + // may be affected by [replace]. If the `to` package is listed as the + // target of a replacement (aka the key of a reverse replacement map) + // then we try to find our dependency edge through that. If that fails + // then we go down below assuming it's not replaced. + // + // Note that we don't treat `from` as if it's been replaced because + // that's where the dependency originates from, and we only replace + // targets of dependencies not the originator. + if let Some(replace) = self.reverse_replacements.get(&to) { + if let Some(deps) = self.graph.edge(&from, replace) { + return deps; + } + } + match self.graph.edge(&from, &to) { + Some(ret) => ret, + None => panic!("no Dependency listed for `{}` => `{}`", from, to), + } + } + + /// Returns the version of the encoding that's being used for this lock + /// file. 
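The `extern_crate_name` method above resolves which name a dependency is imported under: an explicit rename from `Cargo.toml` wins (with `-` mapped to `_`), otherwise the target's crate name is used. A simplified sketch of just that rule, with plain strings standing in for Cargo's `Dependency` and `Target` types:

```rust
/// Illustrative only: `explicit_rename` models `explicit_name_in_toml()`.
fn extern_crate_name(explicit_rename: Option<&str>, crate_name: &str) -> String {
    explicit_rename
        .map(|s| s.replace('-', "_"))
        .unwrap_or_else(|| crate_name.to_string())
}

fn main() {
    // No rename: the crate is imported under its own name.
    assert_eq!(extern_crate_name(None, "serde"), "serde");
    // `my-json = { package = "serde_json" }` style rename: dashes become
    // underscores so the result is a valid Rust identifier.
    assert_eq!(extern_crate_name(Some("my-json"), "serde_json"), "my_json");
}
```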
+ pub fn version(&self) -> &ResolveVersion { + &self.version + } +} + +impl fmt::Debug for Resolve { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(fmt, "graph: {:?}", self.graph)?; + writeln!(fmt, "\nfeatures: {{")?; + for (pkg, features) in &self.features { + writeln!(fmt, " {}: {:?}", pkg, features)?; + } + write!(fmt, "}}") + } +} + +impl ResolveVersion { + /// The default way to encode `Cargo.lock`. + /// + /// This is used for new `Cargo.lock` files that are generated without a + /// previous `Cargo.lock` files, and generally matches with what we want to + /// encode. + pub fn default() -> ResolveVersion { + ResolveVersion::V1 + } + + /// Returns whether this encoding version is "from the future". + /// + /// This means that this encoding version is not currently the default but + /// intended to become the default "soon". + pub fn from_the_future(&self) -> bool { + match self { + ResolveVersion::V2 => true, + ResolveVersion::V1 => false, + } + } +} diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs new file mode 100644 index 00000000000..881869ef16f --- /dev/null +++ b/src/cargo/core/resolver/types.rs @@ -0,0 +1,368 @@ +use std::cmp::Ordering; +use std::collections::{BTreeMap, BTreeSet}; +use std::ops::Range; +use std::rc::Rc; +use std::time::{Duration, Instant}; + +use crate::core::interning::InternedString; +use crate::core::{Dependency, PackageId, Summary}; +use crate::util::errors::CargoResult; +use crate::util::Config; + +use im_rc; + +pub struct ResolverProgress { + ticks: u16, + start: Instant, + time_to_print: Duration, + printed: bool, + deps_time: Duration, + #[cfg(debug_assertions)] + slow_cpu_multiplier: u64, +} + +impl ResolverProgress { + pub fn new() -> ResolverProgress { + ResolverProgress { + ticks: 0, + start: Instant::now(), + time_to_print: Duration::from_millis(500), + printed: false, + deps_time: Duration::new(0, 0), + // Some CI setups are much slower then the equipment used by Cargo itself. + // Architectures that do not have a modern processor, hardware emulation, etc. + // In the test code we have `slow_cpu_multiplier`, but that is not accessible here. + #[cfg(debug_assertions)] + slow_cpu_multiplier: std::env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER") + .ok() + .and_then(|m| m.parse().ok()) + .unwrap_or(1), + } + } + pub fn shell_status(&mut self, config: Option<&Config>) -> CargoResult<()> { + // If we spend a lot of time here (we shouldn't in most cases) then give + // a bit of a visual indicator as to what we're doing. Only enable this + // when stderr is a tty (a human is likely to be watching) to ensure we + // get deterministic output otherwise when observed by tools. + // + // Also note that we hit this loop a lot, so it's fairly performance + // sensitive. As a result try to defer a possibly expensive operation + // like `Instant::now` by only checking every N iterations of this loop + // to amortize the cost of the current time lookup. + self.ticks += 1; + if let Some(config) = config { + if config.shell().is_err_tty() + && !self.printed + && self.ticks % 1000 == 0 + && self.start.elapsed() - self.deps_time > self.time_to_print + { + self.printed = true; + config.shell().status("Resolving", "dependency graph...")?; + } + } + #[cfg(debug_assertions)] + { + // The largest test in our suite takes less then 5000 ticks + // with all the algorithm improvements. + // If any of them are removed then it takes more than I am willing to measure. 
+            // So let's fail the test fast if we have been running for too long.
+            assert!(
+                self.ticks < 50_000,
+                "got to 50_000 ticks in {:?}",
+                self.start.elapsed()
+            );
+            // The largest test in our suite takes less than 30 sec
+            // with all the improvements to how fast a tick can go.
+            // If any of them are removed then it takes more than I am willing to measure.
+            // So let's fail the test fast if we have been running for too long.
+            if self.ticks % 1000 == 0 {
+                assert!(
+                    self.start.elapsed() - self.deps_time
+                        < Duration::from_secs(self.slow_cpu_multiplier * 90)
+                );
+            }
+        }
+        Ok(())
+    }
+    pub fn elapsed(&mut self, dur: Duration) {
+        self.deps_time += dur;
+    }
+}
+
+/// The preferred way to store the set of activated features for a package.
+/// This is sorted so that it impls `Hash`, and owns its contents,
+/// needed so it can be part of the key for caching in the `DepsCache`.
+/// It is also cloned often as part of `Context`, hence the `Rc`.
+/// `im-rs::OrdSet` was slower for small sets like this,
+/// but this can change with improvements to std, im, or llvm.
+/// Using a consistent type for this allows us to use the highly
+/// optimized comparison operators like `is_subset` at the interfaces.
+pub type FeaturesSet = Rc<BTreeSet<InternedString>>;
+
+/// Options for how the resolve should work.
+#[derive(Clone, Debug, Eq, PartialEq, Hash)]
+pub struct ResolveOpts {
+    /// Whether or not dev-dependencies should be included.
+    ///
+    /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`.
+    pub dev_deps: bool,
+    /// Set of features to enable (`--features=…`).
+    pub features: FeaturesSet,
+    /// Indicates *all* features should be enabled (`--all-features`).
+    pub all_features: bool,
+    /// Include the `default` feature (`--no-default-features` sets this false).
+    pub uses_default_features: bool,
+}
+
+impl ResolveOpts {
+    /// Creates a `ResolveOpts` that resolves everything.
+    pub fn everything() -> ResolveOpts {
+        ResolveOpts {
+            dev_deps: true,
+            features: Rc::new(BTreeSet::new()),
+            all_features: true,
+            uses_default_features: true,
+        }
+    }
+
+    pub fn new(
+        dev_deps: bool,
+        features: &[String],
+        all_features: bool,
+        uses_default_features: bool,
+    ) -> ResolveOpts {
+        ResolveOpts {
+            dev_deps,
+            features: Rc::new(ResolveOpts::split_features(features)),
+            all_features,
+            uses_default_features,
+        }
+    }
+
+    fn split_features(features: &[String]) -> BTreeSet<InternedString> {
+        features
+            .iter()
+            .flat_map(|s| s.split_whitespace())
+            .flat_map(|s| s.split(','))
+            .filter(|s| !s.is_empty())
+            .map(InternedString::new)
+            .collect::<BTreeSet<InternedString>>()
+    }
+}
+
+#[derive(Clone)]
+pub struct DepsFrame {
+    pub parent: Summary,
+    pub just_for_error_messages: bool,
+    pub remaining_siblings: RcVecIter<DepInfo>,
+}
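`split_features` above normalizes `--features` input, which may be space-separated, comma-separated, or a mix of both, dropping empty fragments. A self-contained sketch of the same normalization, using `String` in place of `InternedString`:

```rust
use std::collections::BTreeSet;

/// Sketch of the `--features` splitting rule: whitespace and commas both
/// separate feature names, and empty fragments are discarded.
fn split_features(features: &[String]) -> BTreeSet<String> {
    features
        .iter()
        .flat_map(|s| s.split_whitespace())
        .flat_map(|s| s.split(','))
        .filter(|s| !s.is_empty())
        .map(str::to_string)
        .collect()
}

fn main() {
    let input = vec!["foo bar".to_string(), "baz,,qux".to_string()];
    let set = split_features(&input);
    // BTreeSet yields the names deduplicated and sorted.
    assert_eq!(
        set.into_iter().collect::<Vec<_>>(),
        vec!["bar", "baz", "foo", "qux"]
    );
}
```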
+impl DepsFrame {
+    /// Returns the least number of candidates that any of this frame's siblings
+    /// has.
+    ///
+    /// The `remaining_siblings` array is already sorted with the smallest
+    /// number of candidates at the front, so we just return the number of
+    /// candidates in that entry.
+    fn min_candidates(&self) -> usize {
+        self.remaining_siblings
+            .peek()
+            .map(|(_, (_, candidates, _))| candidates.len())
+            .unwrap_or(0)
+    }
+
+    pub fn flatten<'a>(&'a self) -> impl Iterator<Item = (PackageId, Dependency)> + 'a {
+        self.remaining_siblings
+            .clone()
+            .map(move |(d, _, _)| (self.parent.package_id(), d))
+    }
+}
+
+impl PartialEq for DepsFrame {
+    fn eq(&self, other: &DepsFrame) -> bool {
+        self.just_for_error_messages == other.just_for_error_messages
+            && self.min_candidates() == other.min_candidates()
+    }
+}
+
+impl Eq for DepsFrame {}
+
+impl PartialOrd for DepsFrame {
+    fn partial_cmp(&self, other: &DepsFrame) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for DepsFrame {
+    fn cmp(&self, other: &DepsFrame) -> Ordering {
+        self.just_for_error_messages
+            .cmp(&other.just_for_error_messages)
+            .reverse()
+            .then_with(|| self.min_candidates().cmp(&other.min_candidates()))
+    }
+}
+
+/// Note that an `OrdSet` is used for the remaining dependencies that need
+/// activation. This set is sorted by how many candidates each dependency has.
+///
+/// This helps us get through super-constrained portions of the dependency
+/// graph quickly and hopefully lock down what later larger dependencies can
+/// use (those with more candidates).
+#[derive(Clone)]
+pub struct RemainingDeps {
+    /// A monotonic counter, increased for each new insertion.
+    time: u32,
+    /// The data is augmented by the insertion time.
+    /// This ensures that no two items will compare equal, forcing the
+    /// `OrdSet` into a multi-set.
+    data: im_rc::OrdSet<(DepsFrame, u32)>,
+}
+
+impl RemainingDeps {
+    pub fn new() -> RemainingDeps {
+        RemainingDeps {
+            time: 0,
+            data: im_rc::OrdSet::new(),
+        }
+    }
+    pub fn push(&mut self, x: DepsFrame) {
+        let insertion_time = self.time;
+        self.data.insert((x, insertion_time));
+        self.time += 1;
+    }
+    pub fn pop_most_constrained(&mut self) -> Option<(bool, (Summary, DepInfo))> {
+        while let Some((mut deps_frame, insertion_time)) = self.data.remove_min() {
+            let just_here_for_the_error_messages = deps_frame.just_for_error_messages;
+
+            // Figure out what our next dependency to activate is, and if nothing is
+            // listed then we're entirely done with this frame (yay!) and we can
+            // move on to the next frame.
+            if let Some(sibling) = deps_frame.remaining_siblings.next() {
+                let parent = Summary::clone(&deps_frame.parent);
+                self.data.insert((deps_frame, insertion_time));
+                return Some((just_here_for_the_error_messages, (parent, sibling)));
+            }
+        }
+        None
+    }
+    pub fn iter<'a>(&'a mut self) -> impl Iterator<Item = (PackageId, Dependency)> + 'a {
+        self.data.iter().flat_map(|(other, _)| other.flatten())
+    }
+}
+
+/// Information about the dependencies for a crate, a tuple of:
+///
+/// (dependency info, candidates, features activated)
+pub type DepInfo = (Dependency, Rc<Vec<Summary>>, FeaturesSet);
+
+/// All possible reasons that a package might fail to activate.
+///
+/// We maintain a list of conflicts for error reporting as well as backtracking
+/// purposes. Each reason here is why candidates may be rejected or why we may
+/// fail to resolve a dependency.
+#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)]
+pub enum ConflictReason {
+    /// There was a semver conflict, for example we tried to activate a package
+    /// 1.0.2 but 1.1.0 was already activated (aka a compatible semver version
+    /// is already activated)
+    Semver,
+
+    /// The `links` key is being violated. For example one crate in the
+    /// dependency graph has `links = "foo"` but this crate also had that, and
+    /// we're only allowed one per dependency graph.
+ Links(InternedString), + + /// A dependency listed features that weren't actually available on the + /// candidate. For example we tried to activate feature `foo` but the + /// candidate we're activating didn't actually have the feature `foo`. + MissingFeatures(String), + + /// A dependency listed features that ended up being a required dependency. + /// For example we tried to activate feature `foo` but the + /// candidate we're activating didn't actually have the feature `foo` + /// it had a dependency `foo` instead. + RequiredDependencyAsFeatures(InternedString), + + // TODO: needs more info for `activation_error` + // TODO: needs more info for `find_candidate` + /// pub dep error + PublicDependency, +} + +impl ConflictReason { + pub fn is_links(&self) -> bool { + if let ConflictReason::Links(_) = *self { + return true; + } + false + } + + pub fn is_missing_features(&self) -> bool { + if let ConflictReason::MissingFeatures(_) = *self { + return true; + } + false + } + + pub fn is_required_dependency_as_features(&self) -> bool { + if let ConflictReason::RequiredDependencyAsFeatures(_) = *self { + return true; + } + false + } +} + +/// A list of packages that have gotten in the way of resolving a dependency. +/// If resolving a dependency fails then this represents an incompatibility, +/// that dependency will never be resolve while all of these packages are active. +/// This is useless if the packages can't be simultaneously activated for other reasons. +pub type ConflictMap = BTreeMap; + +pub struct RcVecIter { + vec: Rc>, + rest: Range, +} + +impl RcVecIter { + pub fn new(vec: Rc>) -> RcVecIter { + RcVecIter { + rest: 0..vec.len(), + vec, + } + } + + fn peek(&self) -> Option<(usize, &T)> { + self.rest + .clone() + .next() + .and_then(|i| self.vec.get(i).map(|val| (i, &*val))) + } +} + +// Not derived to avoid `T: Clone` +impl Clone for RcVecIter { + fn clone(&self) -> RcVecIter { + RcVecIter { + vec: self.vec.clone(), + rest: self.rest.clone(), + } + } +} + +impl Iterator for RcVecIter +where + T: Clone, +{ + type Item = T; + + fn next(&mut self) -> Option { + self.rest.next().and_then(|i| self.vec.get(i).cloned()) + } + + fn size_hint(&self) -> (usize, Option) { + // rest is a std::ops::Range, which is an ExactSizeIterator. + self.rest.size_hint() + } +} + +impl ExactSizeIterator for RcVecIter {} diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index 6e0a1956410..3f1d8600315 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -1,253 +1,470 @@ use std::fmt; use std::io::prelude::*; -use std::io; -use term::Attr; -use term::color::{Color, BLACK, RED, GREEN, YELLOW}; -use term::{Terminal, TerminfoTerminal, color}; +use atty; +use termcolor::Color::{Cyan, Green, Red, Yellow}; +use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor}; -use self::AdequateTerminal::{NoColor, Colored}; -use self::Verbosity::{Verbose, Normal, Quiet}; -use self::ColorConfig::{Auto, Always, Never}; +use crate::util::errors::CargoResult; -use util::errors::{human, CargoResult}; - -#[derive(Clone, Copy, PartialEq)] +/// The requested verbosity of output. +#[derive(Debug, Clone, Copy, PartialEq)] pub enum Verbosity { Verbose, Normal, - Quiet + Quiet, } -#[derive(Clone, Copy, PartialEq)] -pub enum ColorConfig { - Auto, - Always, - Never +/// An abstraction around a `Write`able object that remembers preferences for output verbosity and +/// color. 
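The `RcVecIter` type defined above makes iteration over a shared `Rc<Vec<T>>` cheaply cloneable: a clone copies only the index range, not the vector, which is what lets the resolver snapshot and resume sibling lists while backtracking. A minimal sketch of the pattern:

```rust
use std::ops::Range;
use std::rc::Rc;

struct RcVecIter<T> {
    vec: Rc<Vec<T>>,
    rest: Range<usize>,
}

impl<T> RcVecIter<T> {
    fn new(vec: Rc<Vec<T>>) -> Self {
        Self { rest: 0..vec.len(), vec }
    }
}

// Implemented by hand (not derived) to avoid requiring `T: Clone`
// for the iterator itself; only the Rc pointer and range are copied.
impl<T> Clone for RcVecIter<T> {
    fn clone(&self) -> Self {
        Self { vec: Rc::clone(&self.vec), rest: self.rest.clone() }
    }
}

impl<T: Clone> Iterator for RcVecIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        self.rest.next().and_then(|i| self.vec.get(i).cloned())
    }
}

fn main() {
    let mut iter = RcVecIter::new(Rc::new(vec![1, 2, 3]));
    assert_eq!(iter.next(), Some(1));
    let saved = iter.clone(); // cheap: shares the same backing Vec
    assert_eq!(iter.collect::<Vec<_>>(), vec![2, 3]);
    assert_eq!(saved.collect::<Vec<_>>(), vec![2, 3]); // resumes from the saved point
}
```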
+pub struct Shell { + /// the `Write`able object, either with or without color support (represented by different enum + /// variants) + err: ShellOut, + /// How verbose messages should be + verbosity: Verbosity, + /// Flag that indicates the current line needs to be cleared before + /// printing. Used when a progress bar is currently displayed. + needs_clear: bool, } -#[derive(Clone, Copy)] -pub struct ShellConfig { - pub color_config: ColorConfig, - pub tty: bool +impl fmt::Debug for Shell { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.err { + ShellOut::Write(_) => f + .debug_struct("Shell") + .field("verbosity", &self.verbosity) + .finish(), + ShellOut::Stream { color_choice, .. } => f + .debug_struct("Shell") + .field("verbosity", &self.verbosity) + .field("color_choice", &color_choice) + .finish(), + } + } } -enum AdequateTerminal { - NoColor(Box), - Colored(Box> + Send>) +/// A `Write`able object, either with or without color support +enum ShellOut { + /// A plain write object without color support + Write(Box), + /// Color-enabled stdio, with information on whether color should be used + Stream { + stream: StandardStream, + tty: bool, + color_choice: ColorChoice, + }, } -pub struct Shell { - terminal: AdequateTerminal, - config: ShellConfig, +/// Whether messages should use color output +#[derive(Debug, PartialEq, Clone, Copy)] +pub enum ColorChoice { + /// Force color output + Always, + /// Force disable color output + Never, + /// Intelligently guess whether to use color output + CargoAuto, } -pub struct MultiShell { - out: Shell, - err: Shell, - verbosity: Verbosity -} +impl Shell { + /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose + /// output. + pub fn new() -> Shell { + Shell { + err: ShellOut::Stream { + stream: StandardStream::stderr(ColorChoice::CargoAuto.to_termcolor_color_choice()), + color_choice: ColorChoice::CargoAuto, + tty: atty::is(atty::Stream::Stderr), + }, + verbosity: Verbosity::Verbose, + needs_clear: false, + } + } -impl MultiShell { - pub fn new(out: Shell, err: Shell, verbosity: Verbosity) -> MultiShell { - MultiShell { out: out, err: err, verbosity: verbosity } + /// Creates a shell from a plain writable object, with no color, and max verbosity. + pub fn from_write(out: Box) -> Shell { + Shell { + err: ShellOut::Write(out), + verbosity: Verbosity::Verbose, + needs_clear: false, + } } - pub fn out(&mut self) -> &mut Shell { - &mut self.out + /// Prints a message, where the status will have `color` color, and can be justified. The + /// messages follows without color. + fn print( + &mut self, + status: &dyn fmt::Display, + message: Option<&dyn fmt::Display>, + color: Color, + justified: bool, + ) -> CargoResult<()> { + match self.verbosity { + Verbosity::Quiet => Ok(()), + _ => { + if self.needs_clear { + self.err_erase_line(); + } + self.err.print(status, message, color, justified) + } + } } - pub fn err(&mut self) -> &mut Shell { - &mut self.err + /// Sets whether the next print should clear the current line. + pub fn set_needs_clear(&mut self, needs_clear: bool) { + self.needs_clear = needs_clear; } - pub fn say(&mut self, message: T, color: Color) -> io::Result<()> { - match self.verbosity { - Quiet => Ok(()), - _ => self.out().say(message, color) + /// Returns `true` if the `needs_clear` flag is unset. + pub fn is_cleared(&self) -> bool { + !self.needs_clear + } + + /// Returns the width of the terminal in spaces, if any. 
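The `ShellOut` enum above is the central design choice in the new `Shell`: one variant wraps an arbitrary writer (used when output is captured, e.g. in tests), the other stands for a real color-capable stderr stream. A pared-down sketch of that split, with a plain `bool` and raw ANSI escapes standing in for termcolor's machinery:

```rust
use std::io::{self, Write};

enum ShellOut {
    /// Captured output: no escape codes, suitable for assertions.
    Write(Box<dyn Write>),
    /// Terminal output; `use_color` models the resolved color choice.
    Stream { use_color: bool },
}

impl ShellOut {
    fn print(&mut self, status: &str, message: &str) -> io::Result<()> {
        match self {
            ShellOut::Write(w) => writeln!(w, "{:>12} {}", status, message),
            ShellOut::Stream { use_color } => {
                let mut err = io::stderr();
                if *use_color {
                    // Bold green, right-justified status, mirroring Cargo's style.
                    write!(err, "\x1b[1;32m{:>12}\x1b[0m {}\n", status, message)
                } else {
                    writeln!(err, "{:>12} {}", status, message)
                }
            }
        }
    }
}

fn main() -> io::Result<()> {
    // A Vec<u8> is a Write impl, so tests can capture and inspect output.
    let mut captured = ShellOut::Write(Box::new(Vec::new()));
    captured.print("Checking", "bar v0.2.0")?;
    let mut out = ShellOut::Stream { use_color: false };
    out.print("Compiling", "foo v0.1.0")
}
```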
+ pub fn err_width(&self) -> Option { + match self.err { + ShellOut::Stream { tty: true, .. } => imp::stderr_width(), + _ => None, } } - pub fn status(&mut self, status: T, message: U) -> io::Result<()> - where T: fmt::Display, U: fmt::Display + /// Returns `true` if stderr is a tty. + pub fn is_err_tty(&self) -> bool { + match self.err { + ShellOut::Stream { tty, .. } => tty, + _ => false, + } + } + + /// Gets a reference to the underlying writer. + pub fn err(&mut self) -> &mut dyn Write { + if self.needs_clear { + self.err_erase_line(); + } + self.err.as_write() + } + + /// Erase from cursor to end of line. + pub fn err_erase_line(&mut self) { + if let ShellOut::Stream { tty: true, .. } = self.err { + imp::err_erase_line(self); + self.needs_clear = false; + } + } + + /// Shortcut to right-align and color green a status message. + pub fn status(&mut self, status: T, message: U) -> CargoResult<()> + where + T: fmt::Display, + U: fmt::Display, + { + self.print(&status, Some(&message), Green, true) + } + + pub fn status_header(&mut self, status: T) -> CargoResult<()> + where + T: fmt::Display, + { + self.print(&status, None, Cyan, true) + } + + /// Shortcut to right-align a status message. + pub fn status_with_color( + &mut self, + status: T, + message: U, + color: Color, + ) -> CargoResult<()> + where + T: fmt::Display, + U: fmt::Display, + { + self.print(&status, Some(&message), color, true) + } + + /// Runs the callback only if we are in verbose mode. + pub fn verbose(&mut self, mut callback: F) -> CargoResult<()> + where + F: FnMut(&mut Shell) -> CargoResult<()>, { match self.verbosity { - Quiet => Ok(()), - _ => self.out().say_status(status, message, GREEN) + Verbosity::Verbose => callback(self), + _ => Ok(()), } } - pub fn verbose(&mut self, mut callback: F) -> io::Result<()> - where F: FnMut(&mut MultiShell) -> io::Result<()> + /// Runs the callback if we are not in verbose mode. + pub fn concise(&mut self, mut callback: F) -> CargoResult<()> + where + F: FnMut(&mut Shell) -> CargoResult<()>, { match self.verbosity { - Verbose => return callback(self), - _ => Ok(()) + Verbosity::Verbose => Ok(()), + _ => callback(self), } } - pub fn concise(&mut self, mut callback: F) -> io::Result<()> - where F: FnMut(&mut MultiShell) -> io::Result<()> - { + /// Prints a red 'error' message. + pub fn error(&mut self, message: T) -> CargoResult<()> { + if self.needs_clear { + self.err_erase_line(); + } + self.err.print(&"error:", Some(&message), Red, false) + } + + /// Prints an amber 'warning' message. + pub fn warn(&mut self, message: T) -> CargoResult<()> { match self.verbosity { - Verbose => Ok(()), - _ => return callback(self) + Verbosity::Quiet => Ok(()), + _ => self.print(&"warning:", Some(&message), Yellow, false), } } - pub fn error(&mut self, message: T) -> io::Result<()> { - self.err().say(message, RED) + /// Updates the verbosity of the shell. + pub fn set_verbosity(&mut self, verbosity: Verbosity) { + self.verbosity = verbosity; } - pub fn warn(&mut self, message: T) -> io::Result<()> { - self.err().say(message, YELLOW) + /// Gets the verbosity of the shell. + pub fn verbosity(&self) -> Verbosity { + self.verbosity } - pub fn set_verbosity(&mut self, verbose: bool, quiet: bool) -> CargoResult<()> { - self.verbosity = match (verbose, quiet) { - (true, true) => return Err(human("cannot set both --verbose and --quiet")), - (true, false) => Verbose, - (false, true) => Quiet, - (false, false) => Normal - }; + /// Updates the color choice (always, never, or auto) from a string.. 
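`Shell::verbose` and `Shell::concise` above take callbacks rather than pre-built strings, so the cost of formatting a message is only paid when it will actually be printed. A minimal sketch of the callback pattern (the commented `status` call is illustrative):

```rust
#[allow(dead_code)]
#[derive(PartialEq)]
enum Verbosity { Verbose, Normal, Quiet }

struct Shell { verbosity: Verbosity }

impl Shell {
    /// Runs `callback` only in verbose mode.
    fn verbose<F: FnMut(&mut Shell)>(&mut self, mut callback: F) {
        if self.verbosity == Verbosity::Verbose {
            callback(self);
        }
    }
}

fn main() {
    let mut shell = Shell { verbosity: Verbosity::Normal };
    // The closure is skipped entirely at normal verbosity, so callers can
    // put relatively expensive formatting inside it at no cost.
    shell.verbose(|s| {
        let _ = s; // e.g. s.status("Fresh", "foo v0.1.0")
        println!("only printed with --verbose");
    });
}
```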
+ pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { + if let ShellOut::Stream { + ref mut stream, + ref mut color_choice, + .. + } = self.err + { + let cfg = match color { + Some("always") => ColorChoice::Always, + Some("never") => ColorChoice::Never, + + Some("auto") | None => ColorChoice::CargoAuto, + + Some(arg) => failure::bail!( + "argument for --color must be auto, always, or \ + never, but found `{}`", + arg + ), + }; + *color_choice = cfg; + *stream = StandardStream::stderr(cfg.to_termcolor_color_choice()); + } Ok(()) } - /// shortcut for commands that don't have both --verbose and --quiet - pub fn set_verbose(&mut self, verbose: bool) { - if verbose { - self.verbosity = Verbose; - } else { - self.verbosity = Normal; + /// Gets the current color choice. + /// + /// If we are not using a color stream, this will always return `Never`, even if the color + /// choice has been set to something else. + pub fn color_choice(&self) -> ColorChoice { + match self.err { + ShellOut::Stream { color_choice, .. } => color_choice, + ShellOut::Write(_) => ColorChoice::Never, } } - pub fn set_color_config(&mut self, color: Option<&str>) -> CargoResult<()> { - self.out.set_color_config(match color { - Some("auto") => Auto, - Some("always") => Always, - Some("never") => Never, - - None => Auto, + /// Whether the shell supports color. + pub fn supports_color(&self) -> bool { + match &self.err { + ShellOut::Write(_) => false, + ShellOut::Stream { stream, .. } => stream.supports_color(), + } + } - Some(arg) => return Err(human(format!("argument for --color must be auto, always, or \ - never, but found `{}`", - arg))), - }); + /// Prints a message and translates ANSI escape code into console colors. + pub fn print_ansi(&mut self, message: &[u8]) -> CargoResult<()> { + if self.needs_clear { + self.err_erase_line(); + } + #[cfg(windows)] + { + if let ShellOut::Stream { stream, .. } = &mut self.err { + ::fwdansi::write_ansi(stream, message)?; + return Ok(()); + } + } + self.err().write_all(message)?; Ok(()) } +} - pub fn get_verbose(&self) -> Verbosity { - self.verbosity +impl Default for Shell { + fn default() -> Self { + Self::new() } } -impl Shell { - pub fn create(out: Box, config: ShellConfig) -> Shell { - // Match from_env() to determine if creation of a TerminfoTerminal is possible regardless - // of the tty status. --color options are parsed after Shell creation so always try to - // create a terminal that supports color output. Fall back to a no-color terminal or write - // output to stderr if a tty is present and color output is not possible. - match ::term::terminfo::TermInfo::from_env() { - Ok(ti) => { - // Color output is possible. - Shell { - terminal: Colored(Box::new(TerminfoTerminal::new_with_terminfo(out, ti))), - config: config +impl ShellOut { + /// Prints out a message with a status. The status comes first, and is bold plus the given + /// color. The status can be justified, in which case the max width that will right align is + /// 12 chars. + fn print( + &mut self, + status: &dyn fmt::Display, + message: Option<&dyn fmt::Display>, + color: Color, + justified: bool, + ) -> CargoResult<()> { + match *self { + ShellOut::Stream { ref mut stream, .. 
} => { + stream.reset()?; + stream.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; + if justified { + write!(stream, "{:>12}", status)?; + } else { + write!(stream, "{}", status)?; + } + stream.reset()?; + match message { + Some(message) => writeln!(stream, " {}", message)?, + None => write!(stream, " ")?, } } - _ if config.tty => { - // Color output is expected but not available, fall back to stderr. - Shell { terminal: NoColor(Box::new(io::stderr())), config: config } - } - _ => { - // No color output. - Shell { terminal: NoColor(out), config: config } + ShellOut::Write(ref mut w) => { + if justified { + write!(w, "{:>12}", status)?; + } else { + write!(w, "{}", status)?; + } + match message { + Some(message) => writeln!(w, " {}", message)?, + None => write!(w, " ")?, + } } } - } - - pub fn set_color_config(&mut self, color_config: ColorConfig) { - self.config.color_config = color_config; - } - - pub fn say(&mut self, message: T, color: Color) -> io::Result<()> { - try!(self.reset()); - if color != BLACK { try!(self.fg(color)); } - try!(write!(self, "{}\n", message.to_string())); - try!(self.reset()); - try!(self.flush()); Ok(()) } - pub fn say_status(&mut self, status: T, message: U, color: Color) - -> io::Result<()> - where T: fmt::Display, U: fmt::Display - { - try!(self.reset()); - if color != BLACK { try!(self.fg(color)); } - if self.supports_attr(Attr::Bold) { try!(self.attr(Attr::Bold)); } - try!(write!(self, "{:>12}", status.to_string())); - try!(self.reset()); - try!(write!(self, " {}\n", message)); - try!(self.flush()); - Ok(()) + /// Gets this object as a `io::Write`. + fn as_write(&mut self) -> &mut dyn Write { + match *self { + ShellOut::Stream { ref mut stream, .. } => stream, + ShellOut::Write(ref mut w) => w, + } } +} - fn fg(&mut self, color: color::Color) -> io::Result { - let colored = self.colored(); - - match self.terminal { - Colored(ref mut c) if colored => c.fg(color), - _ => Ok(false) +impl ColorChoice { + /// Converts our color choice to termcolor's version. + fn to_termcolor_color_choice(self) -> termcolor::ColorChoice { + match self { + ColorChoice::Always => termcolor::ColorChoice::Always, + ColorChoice::Never => termcolor::ColorChoice::Never, + ColorChoice::CargoAuto => { + if atty::is(atty::Stream::Stderr) { + termcolor::ColorChoice::Auto + } else { + termcolor::ColorChoice::Never + } + } } } +} - fn attr(&mut self, attr: Attr) -> io::Result { - let colored = self.colored(); +#[cfg(any(target_os = "linux", target_os = "macos"))] +mod imp { + use std::mem; - match self.terminal { - Colored(ref mut c) if colored => c.attr(attr), - _ => Ok(false) - } - } + use libc; - fn supports_attr(&self, attr: Attr) -> bool { - let colored = self.colored(); + use super::Shell; - match self.terminal { - Colored(ref c) if colored => c.supports_attr(attr), - _ => false + pub fn stderr_width() -> Option { + unsafe { + let mut winsize: libc::winsize = mem::zeroed(); + if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 { + return None; + } + if winsize.ws_col > 0 { + Some(winsize.ws_col as usize) + } else { + None + } } } - fn reset(&mut self) -> io::Result<()> { - let colored = self.colored(); - - match self.terminal { - Colored(ref mut c) if colored => c.reset().map(|_| ()), - _ => Ok(()) - } + pub fn err_erase_line(shell: &mut Shell) { + // This is the "EL - Erase in Line" sequence. It clears from the cursor + // to the end of line. 
+ // https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences + let _ = shell.err.as_write().write_all(b"\x1B[K"); } +} + +#[cfg(all(unix, not(any(target_os = "linux", target_os = "macos"))))] +mod imp { + pub(super) use super::default_err_erase_line as err_erase_line; - fn colored(&self) -> bool { - self.config.tty && Auto == self.config.color_config - || Always == self.config.color_config + pub fn stderr_width() -> Option { + None } } -impl Write for Shell { - fn write(&mut self, buf: &[u8]) -> io::Result { - match self.terminal { - Colored(ref mut c) => c.write(buf), - NoColor(ref mut n) => n.write(buf) +#[cfg(windows)] +mod imp { + use std::{cmp, mem, ptr}; + use winapi::um::fileapi::*; + use winapi::um::handleapi::*; + use winapi::um::processenv::*; + use winapi::um::winbase::*; + use winapi::um::wincon::*; + use winapi::um::winnt::*; + + pub(super) use super::default_err_erase_line as err_erase_line; + + pub fn stderr_width() -> Option { + unsafe { + let stdout = GetStdHandle(STD_ERROR_HANDLE); + let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); + if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 { + return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize); + } + + // On mintty/msys/cygwin based terminals, the above fails with + // INVALID_HANDLE_VALUE. Use an alternate method which works + // in that case as well. + let h = CreateFileA( + "CONOUT$\0".as_ptr() as *const CHAR, + GENERIC_READ | GENERIC_WRITE, + FILE_SHARE_READ | FILE_SHARE_WRITE, + ptr::null_mut(), + OPEN_EXISTING, + 0, + ptr::null_mut(), + ); + if h == INVALID_HANDLE_VALUE { + return None; + } + + let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); + let rc = GetConsoleScreenBufferInfo(h, &mut csbi); + CloseHandle(h); + if rc != 0 { + let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize; + // Unfortunately cygwin/mintty does not set the size of the + // backing console to match the actual window size. This + // always reports a size of 80 or 120 (not sure what + // determines that). Use a conservative max of 60 which should + // work in most circumstances. ConEmu does some magic to + // resize the console correctly, but there's no reasonable way + // to detect which kind of terminal we are running in, or if + // GetConsoleScreenBufferInfo returns accurate information. + return Some(cmp::min(60, width)); + } + None } } +} - fn flush(&mut self) -> io::Result<()> { - match self.terminal { - Colored(ref mut c) => c.flush(), - NoColor(ref mut n) => n.flush() - } +#[cfg(any(all(unix, not(any(target_os = "linux", target_os = "macos"))), windows,))] +fn default_err_erase_line(shell: &mut Shell) { + if let Some(max_width) = imp::stderr_width() { + let blank = " ".repeat(max_width); + drop(write!(shell.err.as_write(), "{}\r", blank)); } } diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs deleted file mode 100644 index d18010a36ef..00000000000 --- a/src/cargo/core/source.rs +++ /dev/null @@ -1,506 +0,0 @@ -use std::cmp::Ordering; -use std::collections::hash_map::{HashMap, Values, IterMut}; -use std::fmt::{self, Formatter}; -use std::hash; -use std::mem; -use std::path::Path; -use std::sync::Arc; -use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; - -use url::Url; - -use core::{Summary, Package, PackageId, Registry, Dependency}; -use sources::{PathSource, GitSource, RegistrySource}; -use sources::git; -use util::{human, Config, CargoResult, ToUrl}; - -/// A Source finds and downloads remote packages based on names and -/// versions. 
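The `err_erase_line` machinery above combines a carriage return with the ANSI "Erase in Line" sequence (`ESC [ K`) to redraw progress on a single terminal row. A small, self-contained demonstration of the technique (the percentages and final message are invented for illustration):

```rust
use std::io::{self, Write};
use std::thread;
use std::time::Duration;

fn main() -> io::Result<()> {
    let mut err = io::stderr();
    for pct in &[25, 50, 75, 100] {
        // `\r` returns to column 0 without advancing to a new line,
        // so each write overdraws the previous progress text.
        write!(err, "\r Downloading {}%", pct)?;
        err.flush()?;
        thread::sleep(Duration::from_millis(200));
    }
    // Return to column 0 and erase to end of line, as err_erase_line does,
    // so the final message is not polluted by leftover progress characters.
    write!(err, "\r\x1b[K")?;
    writeln!(err, " Downloaded 4 crates")?;
    Ok(())
}
```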
-pub trait Source: Registry { - /// The update method performs any network operations required to - /// get the entire list of all names, versions and dependencies of - /// packages managed by the Source. - fn update(&mut self) -> CargoResult<()>; - - /// The download method fetches the full package for each name and - /// version specified. - fn download(&mut self, packages: &[PackageId]) -> CargoResult<()>; - - /// The get method returns the Path of each specified package on the - /// local file system. It assumes that `download` was already called, - /// and that the packages are already locally available on the file - /// system. - fn get(&self, packages: &[PackageId]) -> CargoResult>; - - /// Generates a unique string which represents the fingerprint of the - /// current state of the source. - /// - /// This fingerprint is used to determine the "fresheness" of the source - /// later on. It must be guaranteed that the fingerprint of a source is - /// constant if and only if the output product will remain constant. - /// - /// The `pkg` argument is the package which this fingerprint should only be - /// interested in for when this source may contain multiple packages. - fn fingerprint(&self, pkg: &Package) -> CargoResult; -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -enum Kind { - /// Kind::Git() represents a git repository - Git(GitReference), - /// represents a local path - Path, - /// represents the central registry - Registry, -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum GitReference { - Tag(String), - Branch(String), - Rev(String), -} - -/// Unique identifier for a source of packages. -#[derive(Clone, Eq, Debug)] -pub struct SourceId { - inner: Arc, -} - -#[derive(Eq, Clone, Debug)] -struct SourceIdInner { - url: Url, - canonical_url: Url, - kind: Kind, - // e.g. the exact git revision of the specified branch for a Git Source - precise: Option -} - -impl SourceId { - fn new(kind: Kind, url: Url) -> SourceId { - SourceId { - inner: Arc::new(SourceIdInner { - kind: kind, - canonical_url: git::canonicalize_url(&url), - url: url, - precise: None, - }), - } - } - - /// Parses a source URL and returns the corresponding ID. - /// - /// ## Example - /// - /// ``` - /// use cargo::core::SourceId; - /// SourceId::from_url("git+https://github.com/alexcrichton/\ - /// libssh2-static-sys#80e71a3021618eb05\ - /// 656c58fb7c5ef5f12bc747f".to_string()); - /// ``` - pub fn from_url(string: String) -> SourceId { - let mut parts = string.splitn(2, '+'); - let kind = parts.next().unwrap(); - let url = parts.next().unwrap(); - - match kind { - "git" => { - let mut url = url.to_url().unwrap(); - let mut reference = GitReference::Branch("master".to_string()); - let pairs = url.query_pairs().unwrap_or(Vec::new()); - for &(ref k, ref v) in pairs.iter() { - match &k[..] 
{ - // map older 'ref' to branch - "branch" | - "ref" => reference = GitReference::Branch(v.clone()), - - "rev" => reference = GitReference::Rev(v.clone()), - "tag" => reference = GitReference::Tag(v.clone()), - _ => {} - } - } - url.query = None; - let precise = mem::replace(&mut url.fragment, None); - SourceId::for_git(&url, reference) - .with_precise(precise) - }, - "registry" => { - let url = url.to_url().unwrap(); - SourceId::new(Kind::Registry, url) - .with_precise(Some("locked".to_string())) - } - "path" => SourceId::for_path(Path::new(&url[5..])).unwrap(), - _ => panic!("Unsupported serialized SourceId") - } - } - - pub fn to_url(&self) -> String { - match *self.inner { - SourceIdInner { kind: Kind::Path, .. } => { - panic!("Path sources are not included in the lockfile, \ - so this is unimplemented") - }, - SourceIdInner { - kind: Kind::Git(ref reference), ref url, ref precise, .. - } => { - let ref_str = url_ref(reference); - - let precise_str = if precise.is_some() { - format!("#{}", precise.as_ref().unwrap()) - } else { - "".to_string() - }; - - format!("git+{}{}{}", url, ref_str, precise_str) - }, - SourceIdInner { kind: Kind::Registry, ref url, .. } => { - format!("registry+{}", url) - } - } - } - - // Pass absolute path - pub fn for_path(path: &Path) -> CargoResult { - let url = try!(path.to_url().map_err(human)); - Ok(SourceId::new(Kind::Path, url)) - } - - pub fn for_git(url: &Url, reference: GitReference) -> SourceId { - SourceId::new(Kind::Git(reference), url.clone()) - } - - pub fn for_registry(url: &Url) -> SourceId { - SourceId::new(Kind::Registry, url.clone()) - } - - /// Returns the `SourceId` corresponding to the main repository. - /// - /// This is the main cargo registry by default, but it can be overridden in - /// a `.cargo/config`. - pub fn for_central(config: &Config) -> CargoResult { - Ok(SourceId::for_registry(&try!(RegistrySource::url(config)))) - } - - pub fn url(&self) -> &Url { &self.inner.url } - pub fn is_path(&self) -> bool { self.inner.kind == Kind::Path } - pub fn is_registry(&self) -> bool { self.inner.kind == Kind::Registry } - - pub fn is_git(&self) -> bool { - match self.inner.kind { - Kind::Git(_) => true, - _ => false - } - } - - /// Creates an implementation of `Source` corresponding to this ID. - pub fn load<'a>(&self, config: &'a Config) -> Box { - trace!("loading SourceId; {}", self); - match self.inner.kind { - Kind::Git(..) => Box::new(GitSource::new(self, config)), - Kind::Path => { - let path = match self.inner.url.to_file_path() { - Ok(p) => p, - Err(()) => panic!("path sources cannot be remote"), - }; - Box::new(PathSource::new(&path, self, config)) - } - Kind::Registry => Box::new(RegistrySource::new(self, config)), - } - } - - pub fn precise(&self) -> Option<&str> { - self.inner.precise.as_ref().map(|s| &s[..]) - } - - pub fn git_reference(&self) -> Option<&GitReference> { - match self.inner.kind { - Kind::Git(ref s) => Some(s), - _ => None, - } - } - - pub fn with_precise(&self, v: Option) -> SourceId { - SourceId { - inner: Arc::new(SourceIdInner { - precise: v, - .. 
(*self.inner).clone() - }), - } - } - - pub fn is_default_registry(&self) -> bool { - match self.inner.kind { - Kind::Registry => {} - _ => return false, - } - self.inner.url.to_string() == RegistrySource::default_url() - } -} - -impl PartialEq for SourceId { - fn eq(&self, other: &SourceId) -> bool { - (*self.inner).eq(&*other.inner) - } -} - -impl PartialOrd for SourceId { - fn partial_cmp(&self, other: &SourceId) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for SourceId { - fn cmp(&self, other: &SourceId) -> Ordering { - match self.inner.kind.cmp(&other.inner.kind) { - Ordering::Equal => {} - ord => return ord, - } - if let Kind::Git(..) = self.inner.kind { - match self.inner.precise.cmp(&other.inner.precise) { - Ordering::Equal => {} - ord => return ord, - } - } - self.inner.url.cmp(&other.inner.url) - } -} - -impl Encodable for SourceId { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - if self.is_path() { - s.emit_option_none() - } else { - self.to_url().encode(s) - } - } -} - -impl Decodable for SourceId { - fn decode(d: &mut D) -> Result { - let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); - Ok(SourceId::from_url(string)) - } -} - -impl fmt::Display for SourceId { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match *self.inner { - SourceIdInner { kind: Kind::Path, ref url, .. } => { - fmt::Display::fmt(url, f) - } - SourceIdInner { kind: Kind::Git(ref reference), ref url, - ref precise, .. } => { - try!(write!(f, "{}{}", url, url_ref(reference))); - - match *precise { - Some(ref s) => { - try!(write!(f, "#{}", &s[..8])); - } - None => {} - } - Ok(()) - }, - SourceIdInner { kind: Kind::Registry, ref url, .. } => { - write!(f, "registry {}", url) - } - } - } -} - -// This custom implementation handles situations such as when two git sources -// point at *almost* the same URL, but not quite, even when they actually point -// to the same repository. -impl PartialEq for SourceIdInner { - fn eq(&self, other: &SourceIdInner) -> bool { - if self.kind != other.kind { return false } - if self.url == other.url { return true } - - match (&self.kind, &other.kind) { - (&Kind::Git(ref ref1), &Kind::Git(ref ref2)) => { - ref1 == ref2 && self.canonical_url == other.canonical_url - } - _ => false, - } - } -} - -impl hash::Hash for SourceId { - fn hash(&self, into: &mut S) { - self.inner.kind.hash(into); - match *self.inner { - SourceIdInner { kind: Kind::Git(..), ref canonical_url, .. 
} => { - canonical_url.hash(into) - } - _ => self.inner.url.hash(into), - } - } -} - -fn url_ref(r: &GitReference) -> String { - match r.to_ref_string() { - None => "".to_string(), - Some(s) => format!("?{}", s), - } -} - -impl GitReference { - pub fn to_ref_string(&self) -> Option { - match *self { - GitReference::Branch(ref s) => { - if *s == "master" { - None - } else { - Some(format!("branch={}", s)) - } - } - GitReference::Tag(ref s) => Some(format!("tag={}", s)), - GitReference::Rev(ref s) => Some(format!("rev={}", s)), - } - } -} - -pub struct SourceMap<'src> { - map: HashMap> -} - -pub type Sources<'a, 'src> = Values<'a, SourceId, Box>; - -pub struct SourcesMut<'a, 'src: 'a> { - inner: IterMut<'a, SourceId, Box>, -} - -impl<'src> SourceMap<'src> { - pub fn new() -> SourceMap<'src> { - SourceMap { - map: HashMap::new() - } - } - - pub fn contains(&self, id: &SourceId) -> bool { - self.map.contains_key(id) - } - - pub fn get(&self, id: &SourceId) -> Option<&(Source+'src)> { - let source = self.map.get(id); - - source.map(|s| { - let s: &(Source+'src) = &**s; - s - }) - } - - pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut (Source+'src)> { - self.map.get_mut(id).map(|s| { - let s: &mut (Source+'src) = &mut **s; - s - }) - } - - pub fn get_by_package_id(&self, pkg_id: &PackageId) -> Option<&(Source+'src)> { - self.get(pkg_id.source_id()) - } - - pub fn insert(&mut self, id: &SourceId, source: Box) { - self.map.insert(id.clone(), source); - } - - pub fn len(&self) -> usize { - self.map.len() - } - - pub fn sources<'a>(&'a self) -> Sources<'a, 'src> { - self.map.values() - } - - pub fn sources_mut<'a>(&'a mut self) -> SourcesMut<'a, 'src> { - SourcesMut { inner: self.map.iter_mut() } - } -} - -impl<'a, 'src> Iterator for SourcesMut<'a, 'src> { - type Item = (&'a SourceId, &'a mut (Source + 'src)); - fn next(&mut self) -> Option<(&'a SourceId, &'a mut (Source + 'src))> { - self.inner.next().map(|(a, b)| (a, &mut **b)) - } -} - -/// List of `Source` implementors. `SourceSet` itself implements `Source`. 
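The `url_ref`/`to_ref_string` pair above encodes a Git reference as a URL query string, omitting the default `master` branch so the common case serializes compactly. A sketch of that encoding rule in isolation:

```rust
enum GitReference {
    Tag(String),
    Branch(String),
    Rev(String),
}

impl GitReference {
    /// `None` means "nothing to append to the URL": the default branch.
    fn to_ref_string(&self) -> Option<String> {
        match self {
            GitReference::Branch(s) if s.as_str() == "master" => None,
            GitReference::Branch(s) => Some(format!("branch={}", s)),
            GitReference::Tag(s) => Some(format!("tag={}", s)),
            GitReference::Rev(s) => Some(format!("rev={}", s)),
        }
    }
}

fn main() {
    assert_eq!(GitReference::Branch("master".into()).to_ref_string(), None);
    assert_eq!(
        GitReference::Tag("v0.1.0".into()).to_ref_string(),
        Some("tag=v0.1.0".into())
    );
}
```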
-pub struct SourceSet<'src> { - sources: Vec> -} - -impl<'src> SourceSet<'src> { - pub fn new(sources: Vec>) -> SourceSet<'src> { - SourceSet { sources: sources } - } -} - -impl<'src> Registry for SourceSet<'src> { - fn query(&mut self, name: &Dependency) -> CargoResult> { - let mut ret = Vec::new(); - - for source in self.sources.iter_mut() { - ret.extend(try!(source.query(name)).into_iter()); - } - - Ok(ret) - } -} - -impl<'src> Source for SourceSet<'src> { - fn update(&mut self) -> CargoResult<()> { - for source in self.sources.iter_mut() { - try!(source.update()); - } - - Ok(()) - } - - fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> { - for source in self.sources.iter_mut() { - try!(source.download(packages)); - } - - Ok(()) - } - - fn get(&self, packages: &[PackageId]) -> CargoResult> { - let mut ret = Vec::new(); - - for source in self.sources.iter() { - ret.extend(try!(source.get(packages)).into_iter()); - } - - Ok(ret) - } - - fn fingerprint(&self, id: &Package) -> CargoResult { - let mut ret = String::new(); - for source in self.sources.iter() { - ret.push_str(&try!(source.fingerprint(id))[..]); - } - Ok(ret) - } -} - -#[cfg(test)] -mod tests { - use super::{SourceId, Kind, GitReference}; - use util::ToUrl; - - #[test] - fn github_sources_equal() { - let loc = "https://github.com/foo/bar".to_url().unwrap(); - let master = Kind::Git(GitReference::Branch("master".to_string())); - let s1 = SourceId::new(master.clone(), loc); - - let loc = "git://github.com/foo/bar".to_url().unwrap(); - let s2 = SourceId::new(master, loc.clone()); - - assert_eq!(s1, s2); - - let foo = Kind::Git(GitReference::Branch("foo".to_string())); - let s3 = SourceId::new(foo, loc); - assert!(s1 != s3); - } -} diff --git a/src/cargo/core/source/mod.rs b/src/cargo/core/source/mod.rs new file mode 100644 index 00000000000..0a07b8dfb6e --- /dev/null +++ b/src/cargo/core/source/mod.rs @@ -0,0 +1,321 @@ +use std::collections::hash_map::HashMap; +use std::fmt; + +use crate::core::package::PackageSet; +use crate::core::{Dependency, Package, PackageId, Summary}; +use crate::util::{CargoResult, Config}; + +mod source_id; + +pub use self::source_id::{GitReference, SourceId}; + +/// Something that finds and downloads remote packages based on names and versions. +pub trait Source { + /// Returns the `SourceId` corresponding to this source. + fn source_id(&self) -> SourceId; + + /// Returns the replaced `SourceId` corresponding to this source. + fn replaced_source_id(&self) -> SourceId { + self.source_id() + } + + /// Returns whether or not this source will return summaries with + /// checksums listed. + fn supports_checksums(&self) -> bool; + + /// Returns whether or not this source will return summaries with + /// the `precise` field in the source id listed. + fn requires_precise(&self) -> bool; + + /// Attempts to find the packages that match a dependency request. + fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()>; + + /// Attempts to find the packages that are close to a dependency request. + /// Each source gets to define what `close` means for it. + /// Path/Git sources may return all dependencies that are at that URI, + /// whereas an `Index` source may return dependencies that have the same canonicalization. 
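The new `Source::query` above streams matches through a callback instead of returning a `Vec`, and `query_vec` is a default-method convenience built on top of it. A reduced model of that API shape, with `String` standing in for `Summary` and a simplified error type:

```rust
trait Source {
    /// Pushes each match into the caller-supplied closure rather than
    /// allocating a collection; callers that only need the first match
    /// or want to filter can avoid the intermediate Vec.
    fn query(&mut self, name: &str, f: &mut dyn FnMut(String)) -> Result<(), String>;

    /// Convenience wrapper that collects all matches.
    fn query_vec(&mut self, name: &str) -> Result<Vec<String>, String> {
        let mut ret = Vec::new();
        self.query(name, &mut |s| ret.push(s))?;
        Ok(ret)
    }
}

struct InMemorySource(Vec<String>);

impl Source for InMemorySource {
    fn query(&mut self, name: &str, f: &mut dyn FnMut(String)) -> Result<(), String> {
        for pkg in self.0.iter().filter(|p| p.starts_with(name)) {
            f(pkg.clone()); // stream results one at a time
        }
        Ok(())
    }
}

fn main() {
    let mut src = InMemorySource(vec!["serde-1.0".into(), "serde_json-1.0".into()]);
    assert_eq!(src.query_vec("serde").unwrap().len(), 2);
}
```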
+ fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()>; + + fn query_vec(&mut self, dep: &Dependency) -> CargoResult> { + let mut ret = Vec::new(); + self.query(dep, &mut |s| ret.push(s))?; + Ok(ret) + } + + /// Performs any network operations required to get the entire list of all names, + /// versions and dependencies of packages managed by the `Source`. + fn update(&mut self) -> CargoResult<()>; + + /// Fetches the full package for each name and version specified. + fn download(&mut self, package: PackageId) -> CargoResult; + + fn download_now(self: Box, package: PackageId, config: &Config) -> CargoResult + where + Self: std::marker::Sized, + { + let mut sources = SourceMap::new(); + sources.insert(self); + let pkg_set = PackageSet::new(&[package], sources, config)?; + Ok(pkg_set.get_one(package)?.clone()) + } + + fn finish_download(&mut self, package: PackageId, contents: Vec) -> CargoResult; + + /// Generates a unique string which represents the fingerprint of the + /// current state of the source. + /// + /// This fingerprint is used to determine the "fresheness" of the source + /// later on. It must be guaranteed that the fingerprint of a source is + /// constant if and only if the output product will remain constant. + /// + /// The `pkg` argument is the package which this fingerprint should only be + /// interested in for when this source may contain multiple packages. + fn fingerprint(&self, pkg: &Package) -> CargoResult; + + /// If this source supports it, verifies the source of the package + /// specified. + /// + /// Note that the source may also have performed other checksum-based + /// verification during the `download` step, but this is intended to be run + /// just before a crate is compiled so it may perform more expensive checks + /// which may not be cacheable. + fn verify(&self, _pkg: PackageId) -> CargoResult<()> { + Ok(()) + } + + /// Describes this source in a human readable fashion, used for display in + /// resolver error messages currently. + fn describe(&self) -> String; + + /// Returns whether a source is being replaced by another here. + fn is_replaced(&self) -> bool { + false + } + + /// Add a number of crates that should be whitelisted for showing up during + /// queries, even if they are yanked. Currently only applies to registry + /// sources. + fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]); + + /// Query if a package is yanked. Only registry sources can mark packages + /// as yanked. This ignores the yanked whitelist. + fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult; +} + +pub enum MaybePackage { + Ready(Package), + Download { url: String, descriptor: String }, +} + +impl<'a, T: Source + ?Sized + 'a> Source for Box { + /// Forwards to `Source::source_id`. + fn source_id(&self) -> SourceId { + (**self).source_id() + } + + /// Forwards to `Source::replaced_source_id`. + fn replaced_source_id(&self) -> SourceId { + (**self).replaced_source_id() + } + + /// Forwards to `Source::supports_checksums`. + fn supports_checksums(&self) -> bool { + (**self).supports_checksums() + } + + /// Forwards to `Source::requires_precise`. + fn requires_precise(&self) -> bool { + (**self).requires_precise() + } + + /// Forwards to `Source::query`. + fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { + (**self).query(dep, f) + } + + /// Forwards to `Source::query`. 
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        (**self).fuzzy_query(dep, f)
+    }
+
+    /// Forwards to `Source::update`.
+    fn update(&mut self) -> CargoResult<()> {
+        (**self).update()
+    }
+
+    /// Forwards to `Source::download`.
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+        (**self).download(id)
+    }
+
+    fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+        (**self).finish_download(id, data)
+    }
+
+    /// Forwards to `Source::fingerprint`.
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        (**self).fingerprint(pkg)
+    }
+
+    /// Forwards to `Source::verify`.
+    fn verify(&self, pkg: PackageId) -> CargoResult<()> {
+        (**self).verify(pkg)
+    }
+
+    fn describe(&self) -> String {
+        (**self).describe()
+    }
+
+    fn is_replaced(&self) -> bool {
+        (**self).is_replaced()
+    }
+
+    fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+        (**self).add_to_yanked_whitelist(pkgs);
+    }
+
+    fn is_yanked(&mut self, pkg: PackageId) -> CargoResult<bool> {
+        (**self).is_yanked(pkg)
+    }
+}
+
+impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T {
+    fn source_id(&self) -> SourceId {
+        (**self).source_id()
+    }
+
+    fn replaced_source_id(&self) -> SourceId {
+        (**self).replaced_source_id()
+    }
+
+    fn supports_checksums(&self) -> bool {
+        (**self).supports_checksums()
+    }
+
+    fn requires_precise(&self) -> bool {
+        (**self).requires_precise()
+    }
+
+    fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        (**self).query(dep, f)
+    }
+
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        (**self).fuzzy_query(dep, f)
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        (**self).update()
+    }
+
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+        (**self).download(id)
+    }
+
+    fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+        (**self).finish_download(id, data)
+    }
+
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        (**self).fingerprint(pkg)
+    }
+
+    fn verify(&self, pkg: PackageId) -> CargoResult<()> {
+        (**self).verify(pkg)
+    }
+
+    fn describe(&self) -> String {
+        (**self).describe()
+    }
+
+    fn is_replaced(&self) -> bool {
+        (**self).is_replaced()
+    }
+
+    fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+        (**self).add_to_yanked_whitelist(pkgs);
+    }
+
+    fn is_yanked(&mut self, pkg: PackageId) -> CargoResult<bool> {
+        (**self).is_yanked(pkg)
+    }
+}
+
+/// A `HashMap` of `SourceId` -> `Box<dyn Source>`.
+#[derive(Default)]
+pub struct SourceMap<'src> {
+    map: HashMap<SourceId, Box<dyn Source + 'src>>,
+}
+
+// `impl Debug` on source requires specialization, if even desirable at all.
+impl<'src> fmt::Debug for SourceMap<'src> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "SourceMap ")?;
+        f.debug_set().entries(self.map.keys()).finish()
+    }
+}
+
+impl<'src> SourceMap<'src> {
+    /// Creates an empty map.
+    pub fn new() -> SourceMap<'src> {
+        SourceMap {
+            map: HashMap::new(),
+        }
+    }
+
+    /// Like `HashMap::contains_key`.
+    pub fn contains(&self, id: SourceId) -> bool {
+        self.map.contains_key(&id)
+    }
+
+    /// Like `HashMap::get`.
+    pub fn get(&self, id: SourceId) -> Option<&(dyn Source + 'src)> {
+        let source = self.map.get(&id);
+
+        source.map(|s| {
+            let s: &(dyn Source + 'src) = &**s;
+            s
+        })
+    }
+
+    /// Like `HashMap::get_mut`.
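+    ///
+    /// A usage sketch, assuming `src` is some `Box<dyn Source>` value:
+    ///
+    /// ```ignore
+    /// let mut map = SourceMap::new();
+    /// let id = src.source_id();
+    /// map.insert(src);
+    /// if let Some(source) = map.get_mut(id) {
+    ///     source.update()?;
+    /// }
+    /// ```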
+    pub fn get_mut(&mut self, id: SourceId) -> Option<&mut (dyn Source + 'src)> {
+        self.map.get_mut(&id).map(|s| {
+            let s: &mut (dyn Source + 'src) = &mut **s;
+            s
+        })
+    }
+
+    /// Like `HashMap::get`, but first calculates the `SourceId` from a `PackageId`.
+    pub fn get_by_package_id(&self, pkg_id: PackageId) -> Option<&(dyn Source + 'src)> {
+        self.get(pkg_id.source_id())
+    }
+
+    /// Like `HashMap::insert`, but derives the `SourceId` key from the `Source`.
+    pub fn insert(&mut self, source: Box<dyn Source + 'src>) {
+        let id = source.source_id();
+        self.map.insert(id, source);
+    }
+
+    /// Like `HashMap::is_empty`.
+    pub fn is_empty(&self) -> bool {
+        self.map.is_empty()
+    }
+
+    /// Like `HashMap::len`.
+    pub fn len(&self) -> usize {
+        self.map.len()
+    }
+
+    /// Like `HashMap::values`.
+    pub fn sources<'a>(&'a self) -> impl Iterator<Item = &'a Box<dyn Source + 'src>> {
+        self.map.values()
+    }
+
+    /// Like `HashMap::iter_mut`.
+    pub fn sources_mut<'a>(
+        &'a mut self,
+    ) -> impl Iterator<Item = (&'a SourceId, &'a mut (dyn Source + 'src))> {
+        self.map.iter_mut().map(|(a, b)| (a, &mut **b))
+    }
+}
diff --git a/src/cargo/core/source/source_id.rs b/src/cargo/core/source/source_id.rs
new file mode 100644
index 00000000000..aa2045aee63
--- /dev/null
+++ b/src/cargo/core/source/source_id.rs
@@ -0,0 +1,599 @@
+use std::cmp::{self, Ordering};
+use std::collections::HashSet;
+use std::fmt::{self, Formatter};
+use std::hash::{self, Hash};
+use std::path::Path;
+use std::ptr;
+use std::sync::atomic::AtomicBool;
+use std::sync::atomic::Ordering::SeqCst;
+use std::sync::Mutex;
+
+use log::trace;
+use serde::de;
+use serde::ser;
+use url::Url;
+
+use crate::core::PackageId;
+use crate::ops;
+use crate::sources::git;
+use crate::sources::DirectorySource;
+use crate::sources::{GitSource, PathSource, RegistrySource, CRATES_IO_INDEX};
+use crate::util::{CargoResult, Config, IntoUrl};
+
+lazy_static::lazy_static! {
+    static ref SOURCE_ID_CACHE: Mutex<HashSet<&'static SourceIdInner>> = Mutex::new(HashSet::new());
+}
+
+/// Unique identifier for a source of packages.
+#[derive(Clone, Copy, Eq, Debug)]
+pub struct SourceId {
+    inner: &'static SourceIdInner,
+}
+
+#[derive(PartialEq, Eq, Clone, Debug, Hash)]
+struct SourceIdInner {
+    /// The source URL.
+    url: Url,
+    /// The result of `git::canonicalize_url()` on the `url` field.
+    canonical_url: Url,
+    /// The source kind.
+    kind: Kind,
+    /// For example, the exact Git revision of the specified branch for a Git Source.
+    precise: Option<String>,
+    /// Name of the registry source for alternative registries.
+    /// WARNING: this is not always set for alt-registries when the name is
+    /// not known.
+    name: Option<String>,
+}
+
+/// The possible kinds of code source. Along with `SourceIdInner`, this fully defines the
+/// source.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+enum Kind {
+    /// A git repository.
+    Git(GitReference),
+    /// A local path.
+    Path,
+    /// A remote registry.
+    Registry,
+    /// A local filesystem-based registry.
+    LocalRegistry,
+    /// A directory-based registry.
+    Directory,
+}
+
+/// Information to find a specific commit in a Git repository.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum GitReference {
+    /// From a tag.
+    Tag(String),
+    /// From the HEAD of a branch.
+    Branch(String),
+    /// From a specific revision.
+    Rev(String),
+}
+
+impl SourceId {
+    /// Creates a `SourceId` object from the kind and URL.
+    ///
+    /// The canonical url will be calculated, but the precise field will not be set.
+    fn new(kind: Kind, url: Url) -> CargoResult<SourceId> {
+        let source_id = SourceId::wrap(SourceIdInner {
+            kind,
+            canonical_url: git::canonicalize_url(&url)?,
+            url,
+            precise: None,
+            name: None,
+        });
+        Ok(source_id)
+    }
+
+    fn wrap(inner: SourceIdInner) -> SourceId {
+        let mut cache = SOURCE_ID_CACHE.lock().unwrap();
+        let inner = cache.get(&inner).cloned().unwrap_or_else(|| {
+            let inner = Box::leak(Box::new(inner));
+            cache.insert(inner);
+            inner
+        });
+        SourceId { inner }
+    }
+
+    /// Parses a source URL and returns the corresponding ID.
+    ///
+    /// ## Example
+    ///
+    /// ```
+    /// use cargo::core::SourceId;
+    /// SourceId::from_url("git+https://github.com/alexcrichton/\
+    ///                     libssh2-static-sys#80e71a3021618eb05\
+    ///                     656c58fb7c5ef5f12bc747f");
+    /// ```
+    pub fn from_url(string: &str) -> CargoResult<SourceId> {
+        let mut parts = string.splitn(2, '+');
+        let kind = parts.next().unwrap();
+        let url = parts
+            .next()
+            .ok_or_else(|| failure::format_err!("invalid source `{}`", string))?;
+
+        match kind {
+            "git" => {
+                let mut url = url.into_url()?;
+                let mut reference = GitReference::Branch("master".to_string());
+                for (k, v) in url.query_pairs() {
+                    match &k[..] {
+                        // Map older 'ref' to branch.
+                        "branch" | "ref" => reference = GitReference::Branch(v.into_owned()),
+
+                        "rev" => reference = GitReference::Rev(v.into_owned()),
+                        "tag" => reference = GitReference::Tag(v.into_owned()),
+                        _ => {}
+                    }
+                }
+                let precise = url.fragment().map(|s| s.to_owned());
+                url.set_fragment(None);
+                url.set_query(None);
+                Ok(SourceId::for_git(&url, reference)?.with_precise(precise))
+            }
+            "registry" => {
+                let url = url.into_url()?;
+                Ok(SourceId::new(Kind::Registry, url)?.with_precise(Some("locked".to_string())))
+            }
+            "path" => {
+                let url = url.into_url()?;
+                SourceId::new(Kind::Path, url)
+            }
+            kind => Err(failure::format_err!(
+                "unsupported source protocol: {}",
+                kind
+            )),
+        }
+    }
+
+    /// A view of the `SourceId` that can be `Display`ed as a URL.
+    pub fn into_url(&self) -> SourceIdIntoUrl<'_> {
+        SourceIdIntoUrl {
+            inner: &*self.inner,
+        }
+    }
+
+    /// Creates a `SourceId` from a filesystem path.
+    ///
+    /// `path`: an absolute path.
+    pub fn for_path(path: &Path) -> CargoResult<SourceId> {
+        let url = path.into_url()?;
+        SourceId::new(Kind::Path, url)
+    }
+
+    /// Creates a `SourceId` from a Git reference.
+    pub fn for_git(url: &Url, reference: GitReference) -> CargoResult<SourceId> {
+        SourceId::new(Kind::Git(reference), url.clone())
+    }
+
+    /// Creates a `SourceId` from a registry URL.
+    pub fn for_registry(url: &Url) -> CargoResult<SourceId> {
+        SourceId::new(Kind::Registry, url.clone())
+    }
+
+    /// Creates a `SourceId` from a local registry path.
+    pub fn for_local_registry(path: &Path) -> CargoResult<SourceId> {
+        let url = path.into_url()?;
+        SourceId::new(Kind::LocalRegistry, url)
+    }
+
+    /// Creates a `SourceId` from a directory path.
+    pub fn for_directory(path: &Path) -> CargoResult<SourceId> {
+        let url = path.into_url()?;
+        SourceId::new(Kind::Directory, url)
+    }
+
+    /// Returns the `SourceId` corresponding to the main repository.
+    ///
+    /// This is the main cargo registry by default, but it can be overridden in
+    /// a `.cargo/config`.
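+    ///
+    /// For reference, the (deprecated) override honored here is the
+    /// `registry.index` key of a `[registry]` table in `.cargo/config`, e.g.
+    /// `index = "https://my-mirror.example/index"` (the URL is a hypothetical
+    /// placeholder); the code below warns that this mechanism is going away.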
+    pub fn crates_io(config: &Config) -> CargoResult<SourceId> {
+        config.crates_io_source_id(|| {
+            let cfg = ops::registry_configuration(config, None)?;
+            let url = if let Some(ref index) = cfg.index {
+                static WARNED: AtomicBool = AtomicBool::new(false);
+                if !WARNED.swap(true, SeqCst) {
+                    config.shell().warn(
+                        "custom registry support via \
+                         the `registry.index` configuration is \
+                         being removed, this functionality \
+                         will not work in the future",
+                    )?;
+                }
+                &index[..]
+            } else {
+                CRATES_IO_INDEX
+            };
+            let url = url.into_url()?;
+            SourceId::for_registry(&url)
+        })
+    }
+
+    pub fn alt_registry(config: &Config, key: &str) -> CargoResult<SourceId> {
+        let url = config.get_registry_index(key)?;
+        Ok(SourceId::wrap(SourceIdInner {
+            kind: Kind::Registry,
+            canonical_url: git::canonicalize_url(&url)?,
+            url,
+            precise: None,
+            name: Some(key.to_string()),
+        }))
+    }
+
+    /// Gets this source URL.
+    pub fn url(&self) -> &Url {
+        &self.inner.url
+    }
+
+    pub fn display_index(self) -> String {
+        if self.is_default_registry() {
+            "crates.io index".to_string()
+        } else {
+            format!("`{}` index", url_display(self.url()))
+        }
+    }
+
+    pub fn display_registry_name(self) -> String {
+        if self.is_default_registry() {
+            "crates.io".to_string()
+        } else if let Some(name) = &self.inner.name {
+            name.clone()
+        } else {
+            url_display(self.url())
+        }
+    }
+
+    /// Returns `true` if this source is from a filesystem path.
+    pub fn is_path(self) -> bool {
+        self.inner.kind == Kind::Path
+    }
+
+    /// Returns `true` if this source is from a registry (either local or not).
+    pub fn is_registry(self) -> bool {
+        match self.inner.kind {
+            Kind::Registry | Kind::LocalRegistry => true,
+            _ => false,
+        }
+    }
+
+    /// Returns `true` if this source is from a Git repository.
+    pub fn is_git(self) -> bool {
+        match self.inner.kind {
+            Kind::Git(_) => true,
+            _ => false,
+        }
+    }
+
+    /// Creates an implementation of `Source` corresponding to this ID.
+    pub fn load<'a>(
+        self,
+        config: &'a Config,
+        yanked_whitelist: &HashSet<PackageId>,
+    ) -> CargoResult<Box<dyn Source + 'a>> {
+        trace!("loading SourceId; {}", self);
+        match self.inner.kind {
+            Kind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)),
+            Kind::Path => {
+                let path = match self.inner.url.to_file_path() {
+                    Ok(p) => p,
+                    Err(()) => panic!("path sources cannot be remote"),
+                };
+                Ok(Box::new(PathSource::new(&path, self, config)))
+            }
+            Kind::Registry => Ok(Box::new(RegistrySource::remote(
+                self,
+                yanked_whitelist,
+                config,
+            ))),
+            Kind::LocalRegistry => {
+                let path = match self.inner.url.to_file_path() {
+                    Ok(p) => p,
+                    Err(()) => panic!("path sources cannot be remote"),
+                };
+                Ok(Box::new(RegistrySource::local(
+                    self,
+                    &path,
+                    yanked_whitelist,
+                    config,
+                )))
+            }
+            Kind::Directory => {
+                let path = match self.inner.url.to_file_path() {
+                    Ok(p) => p,
+                    Err(()) => panic!("path sources cannot be remote"),
+                };
+                Ok(Box::new(DirectorySource::new(&path, self, config)))
+            }
+        }
+    }
+
+    /// Gets the value of the precise field.
+    pub fn precise(self) -> Option<&'static str> {
+        self.inner.precise.as_ref().map(|s| &s[..])
+    }
+
+    /// Gets the Git reference if this is a git source, otherwise `None`.
+    pub fn git_reference(self) -> Option<&'static GitReference> {
+        match self.inner.kind {
+            Kind::Git(ref s) => Some(s),
+            _ => None,
+        }
+    }
+
+    /// Creates a new `SourceId` from this source with the given `precise`.
+    pub fn with_precise(self, v: Option<String>) -> SourceId {
+        SourceId::wrap(SourceIdInner {
+            precise: v,
+            ..(*self.inner).clone()
+        })
+    }
+
+    /// Returns `true` if the remote registry is the standard <https://crates.io>.
+    pub fn is_default_registry(self) -> bool {
+        match self.inner.kind {
+            Kind::Registry => {}
+            _ => return false,
+        }
+        self.inner.url.as_str() == CRATES_IO_INDEX
+    }
+
+    /// Hashes `self`.
+    ///
+    /// For paths, remove the workspace prefix so the same source will give the
+    /// same hash in different locations.
+    pub fn stable_hash<S: hash::Hasher>(self, workspace: &Path, into: &mut S) {
+        if self.is_path() {
+            if let Ok(p) = self
+                .inner
+                .url
+                .to_file_path()
+                .unwrap()
+                .strip_prefix(workspace)
+            {
+                self.inner.kind.hash(into);
+                p.to_str().unwrap().hash(into);
+                return;
+            }
+        }
+        self.hash(into)
+    }
+
+    pub fn full_eq(self, other: SourceId) -> bool {
+        ptr::eq(self.inner, other.inner)
+    }
+
+    pub fn full_hash<S: hash::Hasher>(self, into: &mut S) {
+        ptr::NonNull::from(self.inner).hash(into)
+    }
+}
+
+impl PartialOrd for SourceId {
+    fn partial_cmp(&self, other: &SourceId) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for SourceId {
+    fn cmp(&self, other: &SourceId) -> Ordering {
+        self.inner.cmp(other.inner)
+    }
+}
+
+impl ser::Serialize for SourceId {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        if self.is_path() {
+            None::<String>.serialize(s)
+        } else {
+            s.collect_str(&self.into_url())
+        }
+    }
+}
+
+impl<'de> de::Deserialize<'de> for SourceId {
+    fn deserialize<D>(d: D) -> Result<SourceId, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        let string = String::deserialize(d)?;
+        SourceId::from_url(&string).map_err(de::Error::custom)
+    }
+}
+
+fn url_display(url: &Url) -> String {
+    if url.scheme() == "file" {
+        if let Ok(path) = url.to_file_path() {
+            if let Some(path_str) = path.to_str() {
+                return path_str.to_string();
+            }
+        }
+    }
+
+    url.as_str().to_string()
+}
+
+impl fmt::Display for SourceId {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        match self.inner.kind {
+            Kind::Git(ref reference) => {
+                // Don't replace the URL display for git references,
+                // because those are kind of expected to be URLs.
+                write!(f, "{}", self.inner.url)?;
+                if let Some(pretty) = reference.pretty_ref() {
+                    write!(f, "?{}", pretty)?;
+                }
+
+                if let Some(ref s) = self.inner.precise {
+                    let len = cmp::min(s.len(), 8);
+                    write!(f, "#{}", &s[..len])?;
+                }
+                Ok(())
+            }
+            Kind::Path => write!(f, "{}", url_display(&self.inner.url)),
+            Kind::Registry => write!(f, "registry `{}`", url_display(&self.inner.url)),
+            Kind::LocalRegistry => write!(f, "registry `{}`", url_display(&self.inner.url)),
+            Kind::Directory => write!(f, "dir {}", url_display(&self.inner.url)),
+        }
+    }
+}
+
+// Custom equality defined as canonical URL equality for git sources and
+// URL equality for other sources, ignoring the `precise` and `name` fields.
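+//
+// For example, git sources at `https://github.com/foo/bar` and
+// `git://github.com/foo/bar` with the same reference compare equal, since
+// both canonicalize to the same URL (see the `github_sources_equal` test
+// below).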
+impl PartialEq for SourceId {
+    fn eq(&self, other: &SourceId) -> bool {
+        if ptr::eq(self.inner, other.inner) {
+            return true;
+        }
+        if self.inner.kind != other.inner.kind {
+            return false;
+        }
+        if self.inner.url == other.inner.url {
+            return true;
+        }
+
+        match (&self.inner.kind, &other.inner.kind) {
+            (Kind::Git(ref1), Kind::Git(ref2)) => {
+                ref1 == ref2 && self.inner.canonical_url == other.inner.canonical_url
+            }
+            _ => false,
+        }
+    }
+}
+
+impl PartialOrd for SourceIdInner {
+    fn partial_cmp(&self, other: &SourceIdInner) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for SourceIdInner {
+    fn cmp(&self, other: &SourceIdInner) -> Ordering {
+        match self.kind.cmp(&other.kind) {
+            Ordering::Equal => {}
+            ord => return ord,
+        }
+        match self.url.cmp(&other.url) {
+            Ordering::Equal => {}
+            ord => return ord,
+        }
+        match (&self.kind, &other.kind) {
+            (Kind::Git(ref1), Kind::Git(ref2)) => {
+                (ref1, &self.canonical_url).cmp(&(ref2, &other.canonical_url))
+            }
+            _ => self.kind.cmp(&other.kind),
+        }
+    }
+}
+
+// The hash of SourceId is used in the name of some Cargo folders, so shouldn't
+// vary. `as_str` gives the serialisation of a url (which has a spec) and so
+// insulates against possible changes in how the url crate does hashing.
+impl Hash for SourceId {
+    fn hash<S: hash::Hasher>(&self, into: &mut S) {
+        self.inner.kind.hash(into);
+        match self.inner.kind {
+            Kind::Git(_) => self.inner.canonical_url.as_str().hash(into),
+            _ => self.inner.url.as_str().hash(into),
+        }
+    }
+}
+
+/// A `Display`able view into a `SourceId` that will write it as a url.
+pub struct SourceIdIntoUrl<'a> {
+    inner: &'a SourceIdInner,
+}
+
+impl<'a> fmt::Display for SourceIdIntoUrl<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self.inner {
+            SourceIdInner {
+                kind: Kind::Path,
+                ref url,
+                ..
+            } => write!(f, "path+{}", url),
+            SourceIdInner {
+                kind: Kind::Git(ref reference),
+                ref url,
+                ref precise,
+                ..
+            } => {
+                write!(f, "git+{}", url)?;
+                if let Some(pretty) = reference.pretty_ref() {
+                    write!(f, "?{}", pretty)?;
+                }
+                if let Some(precise) = precise.as_ref() {
+                    write!(f, "#{}", precise)?;
+                }
+                Ok(())
+            }
+            SourceIdInner {
+                kind: Kind::Registry,
+                ref url,
+                ..
+            } => write!(f, "registry+{}", url),
+            SourceIdInner {
+                kind: Kind::LocalRegistry,
+                ref url,
+                ..
+            } => write!(f, "local-registry+{}", url),
+            SourceIdInner {
+                kind: Kind::Directory,
+                ref url,
+                ..
+ } => write!(f, "directory+{}", url), + } + } +} + +impl GitReference { + /// Returns a `Display`able view of this git reference, or None if using + /// the head of the "master" branch + pub fn pretty_ref(&self) -> Option> { + match *self { + GitReference::Branch(ref s) if *s == "master" => None, + _ => Some(PrettyRef { inner: self }), + } + } +} + +/// A git reference that can be `Display`ed +pub struct PrettyRef<'a> { + inner: &'a GitReference, +} + +impl<'a> fmt::Display for PrettyRef<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self.inner { + GitReference::Branch(ref b) => write!(f, "branch={}", b), + GitReference::Tag(ref s) => write!(f, "tag={}", s), + GitReference::Rev(ref s) => write!(f, "rev={}", s), + } + } +} + +#[cfg(test)] +mod tests { + use super::{GitReference, Kind, SourceId}; + use crate::util::IntoUrl; + + #[test] + fn github_sources_equal() { + let loc = "https://github.com/foo/bar".into_url().unwrap(); + let master = Kind::Git(GitReference::Branch("master".to_string())); + let s1 = SourceId::new(master.clone(), loc).unwrap(); + + let loc = "git://github.com/foo/bar".into_url().unwrap(); + let s2 = SourceId::new(master, loc.clone()).unwrap(); + + assert_eq!(s1, s2); + + let foo = Kind::Git(GitReference::Branch("foo".to_string())); + let s3 = SourceId::new(foo, loc).unwrap(); + assert_ne!(s1, s3); + } +} diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index 81445d2a3cb..0dbfd2456b3 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -1,107 +1,423 @@ -use std::collections::HashMap; +use std::borrow::Borrow; +use std::collections::{BTreeMap, HashMap}; +use std::fmt::Display; +use std::hash::{Hash, Hasher}; use std::mem; +use std::rc::Rc; +use serde::{Serialize, Serializer}; + +use crate::core::interning::InternedString; +use crate::core::{Dependency, PackageId, SourceId}; use semver::Version; -use core::{Dependency, PackageId, SourceId}; -use util::{CargoResult, human}; +use crate::util::{CargoResult, Platform}; -/// Subset of a `Manifest`. Contains only the most important informations about +/// Subset of a `Manifest`. Contains only the most important information about /// a package. 
/// /// Summaries are cloned, and should not be mutated after creation -#[derive(Debug,Clone)] +#[derive(Debug, Clone)] pub struct Summary { + inner: Rc, +} + +#[derive(Debug, Clone)] +struct Inner { package_id: PackageId, dependencies: Vec, - features: HashMap>, + features: FeatureMap, + checksum: Option, + links: Option, + namespaced_features: bool, } impl Summary { - pub fn new(pkg_id: PackageId, - dependencies: Vec, - features: HashMap>) -> CargoResult { + pub fn new( + pkg_id: PackageId, + dependencies: Vec, + features: &BTreeMap, Vec>)>, + links: Option>, + namespaced_features: bool, + ) -> CargoResult + where + K: Borrow + Ord + Display, + { for dep in dependencies.iter() { - if features.get(dep.name()).is_some() { - return Err(human(format!("Features and dependencies cannot have \ - the same name: `{}`", dep.name()))) + let feature = dep.name_in_toml(); + if !namespaced_features && features.get(&*feature).is_some() { + failure::bail!( + "Features and dependencies cannot have the \ + same name: `{}`", + feature + ) } if dep.is_optional() && !dep.is_transitive() { - return Err(human(format!("Dev-dependencies are not allowed \ - to be optional: `{}`", - dep.name()))) - } - } - for (feature, list) in features.iter() { - for dep in list.iter() { - let mut parts = dep.splitn(2, '/'); - let dep = parts.next().unwrap(); - let is_reexport = parts.next().is_some(); - if !is_reexport && features.get(dep).is_some() { continue } - match dependencies.iter().find(|d| d.name() == dep) { - Some(d) => { - if d.is_optional() || is_reexport { continue } - return Err(human(format!("Feature `{}` depends on `{}` \ - which is not an optional \ - dependency.\nConsider adding \ - `optional = true` to the \ - dependency", feature, dep))) - } - None if is_reexport => { - return Err(human(format!("Feature `{}` requires `{}` \ - which is not an optional \ - dependency", feature, dep))) - } - None => { - return Err(human(format!("Feature `{}` includes `{}` \ - which is neither a dependency \ - nor another feature", - feature, dep))) - } - } + failure::bail!( + "Dev-dependencies are not allowed to be optional: `{}`", + feature + ) } } + let feature_map = build_feature_map(features, &dependencies, namespaced_features)?; Ok(Summary { - package_id: pkg_id, - dependencies: dependencies, - features: features, + inner: Rc::new(Inner { + package_id: pkg_id, + dependencies, + features: feature_map, + checksum: None, + links: links.map(|l| InternedString::new(l.as_ref())), + namespaced_features, + }), }) } - pub fn package_id(&self) -> &PackageId { &self.package_id } - pub fn name(&self) -> &str { self.package_id().name() } - pub fn version(&self) -> &Version { self.package_id().version() } - pub fn source_id(&self) -> &SourceId { self.package_id.source_id() } - pub fn dependencies(&self) -> &[Dependency] { &self.dependencies } - pub fn features(&self) -> &HashMap> { &self.features } + pub fn package_id(&self) -> PackageId { + self.inner.package_id + } + pub fn name(&self) -> InternedString { + self.package_id().name() + } + pub fn version(&self) -> &Version { + self.package_id().version() + } + pub fn source_id(&self) -> SourceId { + self.package_id().source_id() + } + pub fn dependencies(&self) -> &[Dependency] { + &self.inner.dependencies + } + pub fn features(&self) -> &FeatureMap { + &self.inner.features + } + pub fn checksum(&self) -> Option<&str> { + self.inner.checksum.as_ref().map(|s| &s[..]) + } + pub fn links(&self) -> Option { + self.inner.links + } + pub fn namespaced_features(&self) -> bool { + 
self.inner.namespaced_features + } pub fn override_id(mut self, id: PackageId) -> Summary { - self.package_id = id; + Rc::make_mut(&mut self.inner).package_id = id; self } + pub fn set_checksum(&mut self, cksum: String) { + Rc::make_mut(&mut self.inner).checksum = Some(cksum); + } + pub fn map_dependencies(mut self, f: F) -> Summary - where F: FnMut(Dependency) -> Dependency { - let deps = mem::replace(&mut self.dependencies, Vec::new()); - self.dependencies = deps.into_iter().map(f).collect(); + where + F: FnMut(Dependency) -> Dependency, + { + { + let slot = &mut Rc::make_mut(&mut self.inner).dependencies; + let deps = mem::replace(slot, Vec::new()); + *slot = deps.into_iter().map(f).collect(); + } self } + + pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Summary { + let me = if self.package_id().source_id() == to_replace { + let new_id = self.package_id().with_source_id(replace_with); + self.override_id(new_id) + } else { + self + }; + me.map_dependencies(|dep| dep.map_source(to_replace, replace_with)) + } } impl PartialEq for Summary { fn eq(&self, other: &Summary) -> bool { - self.package_id == other.package_id + self.inner.package_id == other.inner.package_id + } +} + +impl Eq for Summary {} + +impl Hash for Summary { + fn hash(&self, state: &mut H) { + self.inner.package_id.hash(state); + } +} + +// Checks features for errors, bailing out a CargoResult:Err if invalid, +// and creates FeatureValues for each feature. +fn build_feature_map( + features: &BTreeMap, Vec>)>, + dependencies: &[Dependency], + namespaced: bool, +) -> CargoResult +where + K: Borrow + Ord + Display, +{ + use self::FeatureValue::*; + let mut dep_map = HashMap::new(); + for dep in dependencies.iter() { + dep_map + .entry(dep.name_in_toml()) + .or_insert_with(Vec::new) + .push(dep); + } + + let mut map = BTreeMap::new(); + for (feature, list) in features.iter() { + // If namespaced features is active and the key is the same as that of an + // optional dependency, that dependency must be included in the values. + // Thus, if a `feature` is found that has the same name as a dependency, we + // (a) bail out if the dependency is non-optional, and (b) we track if the + // feature requirements include the dependency `crate:feature` in the list. + // This is done with the `dependency_found` variable, which can only be + // false if features are namespaced and the current feature key is the same + // as the name of an optional dependency. If so, it gets set to true during + // iteration over the list if the dependency is found in the list. + let mut dependency_found = if namespaced { + match dep_map.get(feature.borrow()) { + Some(dep_data) => { + if !dep_data.iter().any(|d| d.is_optional()) { + failure::bail!( + "Feature `{}` includes the dependency of the same name, but this is \ + left implicit in the features included by this feature.\n\ + Additionally, the dependency must be marked as optional to be \ + included in the feature definition.\n\ + Consider adding `crate:{}` to this feature's requirements \ + and marking the dependency as `optional = true`", + feature, + feature + ) + } else { + false + } + } + None => true, + } + } else { + true + }; + + let mut values = vec![]; + for dep in list.1.as_slice() { + let val = FeatureValue::build( + InternedString::new(dep.as_ref()), + |fs| features.contains_key(fs.as_str()), + namespaced, + ); + + // Find data for the referenced dependency... 
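+            // (Each `FeatureValue` variant carries a dependency-shaped name:
+            // a bare feature may shadow a dependency, `crate:x` names the
+            // dependency `x` directly, and `x/y` refers to feature `y` of
+            // dependency `x`.)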
+ let dep_data = { + match val { + Feature(ref dep_name) | Crate(ref dep_name) | CrateFeature(ref dep_name, _) => { + dep_map.get(dep_name.as_str()) + } + } + }; + let is_optional_dep = dep_data + .iter() + .flat_map(|d| d.iter()) + .any(|d| d.is_optional()); + if let FeatureValue::Crate(ref dep_name) = val { + // If we have a dependency value, check if this is the dependency named + // the same as the feature that we were looking for. + if !dependency_found && feature.borrow() == dep_name.as_str() { + dependency_found = true; + } + } + + match (&val, dep_data.is_some(), is_optional_dep) { + // The value is a feature. If features are namespaced, this just means + // it's not prefixed with `crate:`, so we have to check whether the + // feature actually exist. If the feature is not defined *and* an optional + // dependency of the same name exists, the feature is defined implicitly + // here by adding it to the feature map, pointing to the dependency. + // If features are not namespaced, it's been validated as a feature already + // while instantiating the `FeatureValue` in `FeatureValue::build()`, so + // we don't have to do so here. + (&Feature(feat), _, true) => { + if namespaced && !features.contains_key(&*feat) { + map.insert(feat, (list.0.clone(), vec![FeatureValue::Crate(feat)])); + } + } + // If features are namespaced and the value is not defined as a feature + // and there is no optional dependency of the same name, error out. + // If features are not namespaced, there must be an existing feature + // here (checked by `FeatureValue::build()`), so it will always be defined. + (&Feature(feat), dep_exists, false) => { + if namespaced && !features.contains_key(&*feat) { + if dep_exists { + failure::bail!( + "Feature `{}` includes `{}` which is not defined as a feature.\n\ + A non-optional dependency of the same name is defined; consider \ + adding `optional = true` to its definition", + feature, + feat + ) + } else { + failure::bail!( + "Feature `{}` includes `{}` which is not defined as a feature", + feature, + feat + ) + } + } + } + // The value is a dependency. If features are namespaced, it is explicitly + // tagged as such (`crate:value`). If features are not namespaced, any value + // not recognized as a feature is pegged as a `Crate`. Here we handle the case + // where the dependency exists but is non-optional. It branches on namespaced + // just to provide the correct string for the crate dependency in the error. + (&Crate(ref dep), true, false) => { + if namespaced { + failure::bail!( + "Feature `{}` includes `crate:{}` which is not an \ + optional dependency.\nConsider adding \ + `optional = true` to the dependency", + feature, + dep + ) + } else { + failure::bail!( + "Feature `{}` depends on `{}` which is not an \ + optional dependency.\nConsider adding \ + `optional = true` to the dependency", + feature, + dep + ) + } + } + // If namespaced, the value was tagged as a dependency; if not namespaced, + // this could be anything not defined as a feature. This handles the case + // where no such dependency is actually defined; again, the branch on + // namespaced here is just to provide the correct string in the error. 
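+            // (For instance, a manifest entry like `feat = ["crate:missing"]`
+            // with namespaced features, or `feat = ["missing"]` without, where
+            // `missing` is a hypothetical name matching no dependency.)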
+ (&Crate(ref dep), false, _) => { + if namespaced { + failure::bail!( + "Feature `{}` includes `crate:{}` which is not a known \ + dependency", + feature, + dep + ) + } else { + failure::bail!( + "Feature `{}` includes `{}` which is neither a dependency nor \ + another feature", + feature, + dep + ) + } + } + (&Crate(_), true, true) => {} + // If the value is a feature for one of the dependencies, bail out if no such + // dependency is actually defined in the manifest. + (&CrateFeature(ref dep, _), false, _) => failure::bail!( + "Feature `{}` requires a feature of `{}` which is not a \ + dependency", + feature, + dep + ), + (&CrateFeature(_, _), true, _) => {} + } + values.push(val); + } + + if !dependency_found { + // If we have not found the dependency of the same-named feature, we should + // bail here. + failure::bail!( + "Feature `{}` includes the optional dependency of the \ + same name, but this is left implicit in the features \ + included by this feature.\nConsider adding \ + `crate:{}` to this feature's requirements.", + feature, + feature + ) + } + + map.insert( + InternedString::new(feature.borrow()), + (list.0.clone(), values), + ); } + Ok(map) } -pub trait SummaryVec { - fn names(&self) -> Vec; +/// FeatureValue represents the types of dependencies a feature can have: +/// +/// * Another feature +/// * An optional dependency +/// * A feature in a dependency +/// +/// The selection between these 3 things happens as part of the construction of the FeatureValue. +#[derive(Clone, Debug)] +pub enum FeatureValue { + Feature(InternedString), + Crate(InternedString), + CrateFeature(InternedString, InternedString), } -impl SummaryVec for Vec { - // TODO: Move to Registry - fn names(&self) -> Vec { - self.iter().map(|summary| summary.name().to_string()).collect() +impl FeatureValue { + fn build(feature: InternedString, is_feature: T, namespaced: bool) -> FeatureValue + where + T: Fn(InternedString) -> bool, + { + match (feature.find('/'), namespaced) { + (Some(pos), _) => { + let (dep, dep_feat) = feature.split_at(pos); + let dep_feat = &dep_feat[1..]; + FeatureValue::CrateFeature(InternedString::new(dep), InternedString::new(dep_feat)) + } + (None, true) if feature.starts_with("crate:") => { + FeatureValue::Crate(InternedString::new(&feature[6..])) + } + (None, true) => FeatureValue::Feature(feature), + (None, false) if is_feature(feature) => FeatureValue::Feature(feature), + (None, false) => FeatureValue::Crate(feature), + } + } + + pub fn new(feature: InternedString, s: &Summary) -> FeatureValue { + Self::build( + feature, + |fs| s.features().contains_key(&fs), + s.namespaced_features(), + ) + } + + pub fn to_string(&self, s: &Summary) -> String { + use self::FeatureValue::*; + match *self { + Feature(ref f) => f.to_string(), + Crate(ref c) => { + if s.namespaced_features() { + format!("crate:{}", &c) + } else { + c.to_string() + } + } + CrateFeature(ref c, ref f) => [c.as_ref(), f.as_ref()].join("/"), + } } +} +impl Serialize for FeatureValue { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + use self::FeatureValue::*; + match *self { + Feature(ref f) => serializer.serialize_str(f), + Crate(ref c) => serializer.serialize_str(c), + CrateFeature(ref c, ref f) => { + serializer.serialize_str(&[c.as_ref(), f.as_ref()].join("/")) + } + } + } } + +pub type FeatureMap = BTreeMap, Vec)>; +pub type RefFeatureMap<'a> = BTreeMap; diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs new file mode 100644 index 00000000000..c36f10d33c1 --- 
/dev/null
+++ b/src/cargo/core/workspace.rs
@@ -0,0 +1,943 @@
+use std::cell::RefCell;
+use std::collections::hash_map::{Entry, HashMap};
+use std::collections::{BTreeMap, HashSet};
+use std::path::{Path, PathBuf};
+use std::slice;
+
+use glob::glob;
+use log::debug;
+use url::Url;
+
+use crate::core::features::Features;
+use crate::core::profiles::Profiles;
+use crate::core::registry::PackageRegistry;
+use crate::core::{Dependency, PackageId, PackageIdSpec};
+use crate::core::{EitherManifest, Package, SourceId, VirtualManifest};
+use crate::ops;
+use crate::sources::PathSource;
+use crate::util::errors::{CargoResult, CargoResultExt, ManifestError};
+use crate::util::paths;
+use crate::util::toml::read_manifest;
+use crate::util::{Config, Filesystem};
+
+/// The core abstraction in Cargo for working with a workspace of crates.
+///
+/// A workspace is often created very early on and then threaded through all
+/// other functions. It's typically through this object that the current
+/// package is loaded and/or learned about.
+#[derive(Debug)]
+pub struct Workspace<'cfg> {
+    config: &'cfg Config,
+
+    // This path is a path to where the current cargo subcommand was invoked
+    // from. That is the `--manifest-path` argument to Cargo, and
+    // points to the "main crate" that we're going to worry about.
+    current_manifest: PathBuf,
+
+    // A list of packages found in this workspace. Always includes at least the
+    // package mentioned by `current_manifest`.
+    packages: Packages<'cfg>,
+
+    // If this workspace includes more than one crate, this points to the root
+    // of the workspace. This is `None` in the case that `[workspace]` is
+    // missing, `package.workspace` is missing, and no `Cargo.toml` above
+    // `current_manifest` was found on the filesystem with `[workspace]`.
+    root_manifest: Option<PathBuf>,
+
+    // Shared target directory for all the packages of this workspace.
+    // `None` if the default path of `root/target` should be used.
+    target_dir: Option<Filesystem>,
+
+    // List of members in this workspace with a listing of all their manifest
+    // paths. The packages themselves can be looked up through the `packages`
+    // set above.
+    members: Vec<PathBuf>,
+    member_ids: HashSet<PackageId>,
+
+    // The subset of `members` that are used by the
+    // `build`, `check`, `test`, and `bench` subcommands
+    // when no package is selected with `--package` / `-p` and `--all`
+    // is not used.
+    //
+    // This is set by the `default-members` config
+    // in the `[workspace]` section.
+    // When unset, this is the same as `members` for virtual workspaces
+    // (`--all` is implied)
+    // or only the root package for non-virtual workspaces.
+    default_members: Vec<PathBuf>,
+
+    // `true` if this is a temporary workspace created for the purposes of the
+    // `cargo install` or `cargo package` commands.
+    is_ephemeral: bool,
+
+    // `true` if this workspace should enforce optional dependencies even when
+    // not needed; `false` if this workspace should only enforce dependencies
+    // needed by the current configuration (such as in `cargo install`). In some
+    // cases `false` also results in the non-enforcement of dev-dependencies.
+    require_optional_deps: bool,
+
+    // A cache of loaded packages for particular paths which is disjoint from
+    // `packages` up above, used in the `load` method down below.
+    loaded_packages: RefCell<HashMap<PathBuf, Package>>,
+
+    // If `true`, then the resolver will ignore any existing `Cargo.lock`
+    // file. This is set for `cargo install` without `--locked`.
+    ignore_lock: bool,
+}
+
+// Separate structure for tracking loaded packages (to avoid loading anything
+// twice), and this is separate to help appease the borrow checker.
+#[derive(Debug)]
+struct Packages<'cfg> {
+    config: &'cfg Config,
+    packages: HashMap<PathBuf, MaybePackage>,
+}
+
+#[derive(Debug)]
+enum MaybePackage {
+    Package(Package),
+    Virtual(VirtualManifest),
+}
+
+/// Configuration of a workspace in a manifest.
+#[derive(Debug, Clone)]
+pub enum WorkspaceConfig {
+    /// Indicates that `[workspace]` was present and the members were
+    /// optionally specified as well.
+    Root(WorkspaceRootConfig),
+
+    /// Indicates that `[workspace]` was present and the `root` field is the
+    /// optional value of `package.workspace`, if present.
+    Member { root: Option<String> },
+}
+
+/// Intermediate configuration of a workspace root in a manifest.
+///
+/// Knows the Workspace Root path, as well as `members` and `exclude` lists of path patterns, which
+/// together tell if some path is recognized as a member by this root or not.
+#[derive(Debug, Clone)]
+pub struct WorkspaceRootConfig {
+    root_dir: PathBuf,
+    members: Option<Vec<String>>,
+    default_members: Option<Vec<String>>,
+    exclude: Vec<String>,
+}
+
+/// An iterator over the member packages of a workspace, returned by
+/// `Workspace::members`.
+pub struct Members<'a, 'cfg> {
+    ws: &'a Workspace<'cfg>,
+    iter: slice::Iter<'a, PathBuf>,
+}
+
+impl<'cfg> Workspace<'cfg> {
+    /// Creates a new workspace given the target manifest pointed to by
+    /// `manifest_path`.
+    ///
+    /// This function will construct the entire workspace by determining the
+    /// root and all member packages. It will then validate the workspace
+    /// before returning it, so `Ok` is only returned for valid workspaces.
+    pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
+        let target_dir = config.target_dir()?;
+
+        let mut ws = Workspace {
+            config,
+            current_manifest: manifest_path.to_path_buf(),
+            packages: Packages {
+                config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            target_dir,
+            members: Vec::new(),
+            member_ids: HashSet::new(),
+            default_members: Vec::new(),
+            is_ephemeral: false,
+            require_optional_deps: true,
+            loaded_packages: RefCell::new(HashMap::new()),
+            ignore_lock: false,
+        };
+        ws.root_manifest = ws.find_root(manifest_path)?;
+        ws.find_members()?;
+        ws.validate()?;
+        Ok(ws)
+    }
+
+    /// Creates a "temporary workspace" from one package which only contains
+    /// that package.
+    ///
+    /// This constructor will not touch the filesystem and only creates an
+    /// in-memory workspace. That is, all configuration is ignored, it's just
+    /// intended for that one package.
+    ///
+    /// This is currently only used in niche situations like `cargo install` or
+    /// `cargo package`.
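+    ///
+    /// A sketch of the call shape (`pkg` and `config` are hypothetical values):
+    ///
+    /// ```ignore
+    /// let ws = Workspace::ephemeral(pkg, &config, None, /* require_optional_deps */ true)?;
+    /// assert!(ws.is_ephemeral());
+    /// ```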
+ pub fn ephemeral( + package: Package, + config: &'cfg Config, + target_dir: Option, + require_optional_deps: bool, + ) -> CargoResult> { + let mut ws = Workspace { + config, + current_manifest: package.manifest_path().to_path_buf(), + packages: Packages { + config, + packages: HashMap::new(), + }, + root_manifest: None, + target_dir: None, + members: Vec::new(), + member_ids: HashSet::new(), + default_members: Vec::new(), + is_ephemeral: true, + require_optional_deps, + loaded_packages: RefCell::new(HashMap::new()), + ignore_lock: false, + }; + { + let key = ws.current_manifest.parent().unwrap(); + let id = package.package_id(); + let package = MaybePackage::Package(package); + ws.packages.packages.insert(key.to_path_buf(), package); + ws.target_dir = if let Some(dir) = target_dir { + Some(dir) + } else { + ws.config.target_dir()? + }; + ws.members.push(ws.current_manifest.clone()); + ws.member_ids.insert(id); + ws.default_members.push(ws.current_manifest.clone()); + } + Ok(ws) + } + + /// Returns the current package of this workspace. + /// + /// Note that this can return an error if it the current manifest is + /// actually a "virtual Cargo.toml", in which case an error is returned + /// indicating that something else should be passed. + pub fn current(&self) -> CargoResult<&Package> { + let pkg = self.current_opt().ok_or_else(|| { + failure::format_err!( + "manifest path `{}` is a virtual manifest, but this \ + command requires running against an actual package in \ + this workspace", + self.current_manifest.display() + ) + })?; + Ok(pkg) + } + + pub fn current_opt(&self) -> Option<&Package> { + match *self.packages.get(&self.current_manifest) { + MaybePackage::Package(ref p) => Some(p), + MaybePackage::Virtual(..) => None, + } + } + + pub fn is_virtual(&self) -> bool { + match *self.packages.get(&self.current_manifest) { + MaybePackage::Package(..) => false, + MaybePackage::Virtual(..) => true, + } + } + + /// Returns the `Config` this workspace is associated with. + pub fn config(&self) -> &'cfg Config { + self.config + } + + pub fn profiles(&self) -> &Profiles { + match self.root_maybe() { + MaybePackage::Package(p) => p.manifest().profiles(), + MaybePackage::Virtual(vm) => vm.profiles(), + } + } + + /// Returns the root path of this workspace. + /// + /// That is, this returns the path of the directory containing the + /// `Cargo.toml` which is the root of this workspace. + pub fn root(&self) -> &Path { + match self.root_manifest { + Some(ref p) => p, + None => &self.current_manifest, + } + .parent() + .unwrap() + } + + /// Returns the root Package or VirtualManifest. + fn root_maybe(&self) -> &MaybePackage { + let root = self + .root_manifest + .as_ref() + .unwrap_or(&self.current_manifest); + self.packages.get(root) + } + + pub fn target_dir(&self) -> Filesystem { + self.target_dir + .clone() + .unwrap_or_else(|| Filesystem::new(self.root().join("target"))) + } + + /// Returns the root `[replace]` section of this workspace. + /// + /// This may be from a virtual crate or an actual crate. + pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] { + match self.root_maybe() { + MaybePackage::Package(p) => p.manifest().replace(), + MaybePackage::Virtual(vm) => vm.replace(), + } + } + + /// Returns the root `[patch]` section of this workspace. + /// + /// This may be from a virtual crate or an actual crate. 
+ pub fn root_patch(&self) -> &HashMap> { + match self.root_maybe() { + MaybePackage::Package(p) => p.manifest().patch(), + MaybePackage::Virtual(vm) => vm.patch(), + } + } + + /// Returns an iterator over all packages in this workspace + pub fn members<'a>(&'a self) -> Members<'a, 'cfg> { + Members { + ws: self, + iter: self.members.iter(), + } + } + + /// Returns an iterator over default packages in this workspace + pub fn default_members<'a>(&'a self) -> Members<'a, 'cfg> { + Members { + ws: self, + iter: self.default_members.iter(), + } + } + + /// Returns true if the package is a member of the workspace. + pub fn is_member(&self, pkg: &Package) -> bool { + self.member_ids.contains(&pkg.package_id()) + } + + pub fn is_ephemeral(&self) -> bool { + self.is_ephemeral + } + + pub fn require_optional_deps(&self) -> bool { + self.require_optional_deps + } + + pub fn set_require_optional_deps( + &mut self, + require_optional_deps: bool, + ) -> &mut Workspace<'cfg> { + self.require_optional_deps = require_optional_deps; + self + } + + pub fn ignore_lock(&self) -> bool { + self.ignore_lock + } + + pub fn set_ignore_lock(&mut self, ignore_lock: bool) -> &mut Workspace<'cfg> { + self.ignore_lock = ignore_lock; + self + } + + /// Finds the root of a workspace for the crate whose manifest is located + /// at `manifest_path`. + /// + /// This will parse the `Cargo.toml` at `manifest_path` and then interpret + /// the workspace configuration, optionally walking up the filesystem + /// looking for other workspace roots. + /// + /// Returns an error if `manifest_path` isn't actually a valid manifest or + /// if some other transient error happens. + fn find_root(&mut self, manifest_path: &Path) -> CargoResult> { + fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult { + let path = member_manifest + .parent() + .unwrap() + .join(root_link) + .join("Cargo.toml"); + debug!("find_root - pointer {}", path.display()); + Ok(paths::normalize_path(&path)) + }; + + { + let current = self.packages.load(manifest_path)?; + match *current.workspace_config() { + WorkspaceConfig::Root(_) => { + debug!("find_root - is root {}", manifest_path.display()); + return Ok(Some(manifest_path.to_path_buf())); + } + WorkspaceConfig::Member { + root: Some(ref path_to_root), + } => return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)), + WorkspaceConfig::Member { root: None } => {} + } + } + + for path in paths::ancestors(manifest_path).skip(2) { + if path.ends_with("target/package") { + break; + } + + let ances_manifest_path = path.join("Cargo.toml"); + debug!("find_root - trying {}", ances_manifest_path.display()); + if ances_manifest_path.exists() { + match *self.packages.load(&ances_manifest_path)?.workspace_config() { + WorkspaceConfig::Root(ref ances_root_config) => { + debug!("find_root - found a root checking exclusion"); + if !ances_root_config.is_excluded(manifest_path) { + debug!("find_root - found!"); + return Ok(Some(ances_manifest_path)); + } + } + WorkspaceConfig::Member { + root: Some(ref path_to_root), + } => { + debug!("find_root - found pointer"); + return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root)?)); + } + WorkspaceConfig::Member { .. } => {} + } + } + + // Don't walk across `CARGO_HOME` when we're looking for the + // workspace root. Sometimes a package will be organized with + // `CARGO_HOME` pointing inside of the workspace root or in the + // current package, but we don't want to mistakenly try to put + // crates.io crates into the workspace by accident. 
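+            // (Concretely: once the ancestor walk reaches the `CARGO_HOME`
+            // directory itself, stop instead of treating anything at or above
+            // it as a candidate workspace root.)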
+ if self.config.home() == path { + break; + } + } + + Ok(None) + } + + /// After the root of a workspace has been located, probes for all members + /// of a workspace. + /// + /// If the `workspace.members` configuration is present, then this just + /// verifies that those are all valid packages to point to. Otherwise, this + /// will transitively follow all `path` dependencies looking for members of + /// the workspace. + fn find_members(&mut self) -> CargoResult<()> { + let root_manifest_path = match self.root_manifest { + Some(ref path) => path.clone(), + None => { + debug!("find_members - only me as a member"); + self.members.push(self.current_manifest.clone()); + self.default_members.push(self.current_manifest.clone()); + if let Ok(pkg) = self.current() { + let id = pkg.package_id(); + self.member_ids.insert(id); + } + return Ok(()); + } + }; + + let members_paths; + let default_members_paths; + { + let root_package = self.packages.load(&root_manifest_path)?; + match *root_package.workspace_config() { + WorkspaceConfig::Root(ref root_config) => { + members_paths = root_config + .members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?; + default_members_paths = if let Some(ref default) = root_config.default_members { + Some(root_config.members_paths(default)?) + } else { + None + } + } + _ => failure::bail!( + "root of a workspace inferred but wasn't a root: {}", + root_manifest_path.display() + ), + } + } + + for path in members_paths { + self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false)?; + } + + if let Some(default) = default_members_paths { + for path in default { + let manifest_path = paths::normalize_path(&path.join("Cargo.toml")); + if !self.members.contains(&manifest_path) { + failure::bail!( + "package `{}` is listed in workspace’s default-members \ + but is not a member.", + path.display() + ) + } + self.default_members.push(manifest_path) + } + } else if self.is_virtual() { + self.default_members = self.members.clone() + } else { + self.default_members.push(self.current_manifest.clone()) + } + + self.find_path_deps(&root_manifest_path, &root_manifest_path, false) + } + + fn find_path_deps( + &mut self, + manifest_path: &Path, + root_manifest: &Path, + is_path_dep: bool, + ) -> CargoResult<()> { + let manifest_path = paths::normalize_path(manifest_path); + if self.members.contains(&manifest_path) { + return Ok(()); + } + if is_path_dep + && !manifest_path.parent().unwrap().starts_with(self.root()) + && self.find_root(&manifest_path)? != self.root_manifest + { + // If `manifest_path` is a path dependency outside of the workspace, + // don't add it, or any of its dependencies, as a members. + return Ok(()); + } + + if let WorkspaceConfig::Root(ref root_config) = + *self.packages.load(root_manifest)?.workspace_config() + { + if root_config.is_excluded(&manifest_path) { + return Ok(()); + } + } + + debug!("find_members - {}", manifest_path.display()); + self.members.push(manifest_path.clone()); + + let candidates = { + let pkg = match *self.packages.load(&manifest_path)? 
{ + MaybePackage::Package(ref p) => p, + MaybePackage::Virtual(_) => return Ok(()), + }; + self.member_ids.insert(pkg.package_id()); + pkg.dependencies() + .iter() + .map(|d| d.source_id()) + .filter(|d| d.is_path()) + .filter_map(|d| d.url().to_file_path().ok()) + .map(|p| p.join("Cargo.toml")) + .collect::>() + }; + for candidate in candidates { + self.find_path_deps(&candidate, root_manifest, true) + .map_err(|err| ManifestError::new(err, manifest_path.clone()))?; + } + Ok(()) + } + + pub fn features(&self) -> &Features { + match self.root_maybe() { + MaybePackage::Package(p) => p.manifest().features(), + MaybePackage::Virtual(vm) => vm.features(), + } + } + + /// Validates a workspace, ensuring that a number of invariants are upheld: + /// + /// 1. A workspace only has one root. + /// 2. All workspace members agree on this one root as the root. + /// 3. The current crate is a member of this workspace. + fn validate(&mut self) -> CargoResult<()> { + // Validate config profiles only once per workspace. + let features = self.features(); + let mut warnings = Vec::new(); + self.config.profiles()?.validate(features, &mut warnings)?; + for warning in warnings { + self.config.shell().warn(&warning)?; + } + + // The rest of the checks require a VirtualManifest or multiple members. + if self.root_manifest.is_none() { + return Ok(()); + } + + let mut roots = Vec::new(); + { + let mut names = BTreeMap::new(); + for member in self.members.iter() { + let package = self.packages.get(member); + match *package.workspace_config() { + WorkspaceConfig::Root(_) => { + roots.push(member.parent().unwrap().to_path_buf()); + } + WorkspaceConfig::Member { .. } => {} + } + let name = match *package { + MaybePackage::Package(ref p) => p.name(), + MaybePackage::Virtual(_) => continue, + }; + if let Some(prev) = names.insert(name, member) { + failure::bail!( + "two packages named `{}` in this workspace:\n\ + - {}\n\ + - {}", + name, + prev.display(), + member.display() + ); + } + } + } + + match roots.len() { + 0 => failure::bail!( + "`package.workspace` configuration points to a crate \ + which is not configured with [workspace]: \n\ + configuration at: {}\n\ + points to: {}", + self.current_manifest.display(), + self.root_manifest.as_ref().unwrap().display() + ), + 1 => {} + _ => { + failure::bail!( + "multiple workspace roots found in the same workspace:\n{}", + roots + .iter() + .map(|r| format!(" {}", r.display())) + .collect::>() + .join("\n") + ); + } + } + + for member in self.members.clone() { + let root = self.find_root(&member)?; + if root == self.root_manifest { + continue; + } + + match root { + Some(root) => { + failure::bail!( + "package `{}` is a member of the wrong workspace\n\ + expected: {}\n\ + actual: {}", + member.display(), + self.root_manifest.as_ref().unwrap().display(), + root.display() + ); + } + None => { + failure::bail!( + "workspace member `{}` is not hierarchically below \ + the workspace root `{}`", + member.display(), + self.root_manifest.as_ref().unwrap().display() + ); + } + } + } + + if !self.members.contains(&self.current_manifest) { + let root = self.root_manifest.as_ref().unwrap(); + let root_dir = root.parent().unwrap(); + let current_dir = self.current_manifest.parent().unwrap(); + let root_pkg = self.packages.get(root); + + // FIXME: Make this more generic by using a relative path resolver between member and + // root. 
+ let members_msg = match current_dir.strip_prefix(root_dir) { + Ok(rel) => format!( + "this may be fixable by adding `{}` to the \ + `workspace.members` array of the manifest \ + located at: {}", + rel.display(), + root.display() + ), + Err(_) => format!( + "this may be fixable by adding a member to \ + the `workspace.members` array of the \ + manifest located at: {}", + root.display() + ), + }; + let extra = match *root_pkg { + MaybePackage::Virtual(_) => members_msg, + MaybePackage::Package(ref p) => { + let has_members_list = match *p.manifest().workspace_config() { + WorkspaceConfig::Root(ref root_config) => root_config.has_members_list(), + WorkspaceConfig::Member { .. } => unreachable!(), + }; + if !has_members_list { + format!( + "this may be fixable by ensuring that this \ + crate is depended on by the workspace \ + root: {}", + root.display() + ) + } else { + members_msg + } + } + }; + failure::bail!( + "current package believes it's in a workspace when it's not:\n\ + current: {}\n\ + workspace: {}\n\n{}\n\ + Alternatively, to keep it out of the workspace, add the package \ + to the `workspace.exclude` array, or add an empty `[workspace]` \ + table to the package's manifest.", + self.current_manifest.display(), + root.display(), + extra + ); + } + + if let Some(ref root_manifest) = self.root_manifest { + for pkg in self + .members() + .filter(|p| p.manifest_path() != root_manifest) + { + let manifest = pkg.manifest(); + let emit_warning = |what| -> CargoResult<()> { + let msg = format!( + "{} for the non root package will be ignored, \ + specify {} at the workspace root:\n\ + package: {}\n\ + workspace: {}", + what, + what, + pkg.manifest_path().display(), + root_manifest.display(), + ); + self.config.shell().warn(&msg) + }; + if manifest.original().has_profiles() { + emit_warning("profiles")?; + } + if !manifest.replace().is_empty() { + emit_warning("replace")?; + } + if !manifest.patch().is_empty() { + emit_warning("patch")?; + } + } + } + + Ok(()) + } + + pub fn load(&self, manifest_path: &Path) -> CargoResult { + match self.packages.maybe_get(manifest_path) { + Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()), + Some(&MaybePackage::Virtual(_)) => failure::bail!("cannot load workspace root"), + None => {} + } + + let mut loaded = self.loaded_packages.borrow_mut(); + if let Some(p) = loaded.get(manifest_path).cloned() { + return Ok(p); + } + let source_id = SourceId::for_path(manifest_path.parent().unwrap())?; + let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.config)?; + loaded.insert(manifest_path.to_path_buf(), package.clone()); + Ok(package) + } + + /// Preload the provided registry with already loaded packages. + /// + /// A workspace may load packages during construction/parsing/early phases + /// for various operations, and this preload step avoids doubly-loading and + /// parsing crates on the filesystem by inserting them all into the registry + /// with their in-memory formats. + pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) { + // These can get weird as this generally represents a workspace during + // `cargo install`. Things like git repositories will actually have a + // `PathSource` with multiple entries in it, so the logic below is + // mostly just an optimization for normal `cargo build` in workspaces + // during development. 
+
+ /// Preload the provided registry with already loaded packages.
+ ///
+ /// A workspace may load packages during construction/parsing/early phases
+ /// for various operations, and this preload step avoids doubly-loading and
+ /// parsing crates on the filesystem by inserting them all into the registry
+ /// with their in-memory formats.
+ pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) {
+ // These can get weird as this generally represents a workspace during
+ // `cargo install`. Things like git repositories will actually have a
+ // `PathSource` with multiple entries in it, so the logic below is
+ // mostly just an optimization for normal `cargo build` in workspaces
+ // during development.
+ if self.is_ephemeral {
+ return;
+ }
+
+ for pkg in self.packages.packages.values() {
+ let pkg = match *pkg {
+ MaybePackage::Package(ref p) => p.clone(),
+ MaybePackage::Virtual(_) => continue,
+ };
+ let mut src = PathSource::new(
+ pkg.manifest_path(),
+ pkg.package_id().source_id(),
+ self.config,
+ );
+ src.preload_with(pkg);
+ registry.add_preloaded(Box::new(src));
+ }
+ }
+
+ pub fn emit_warnings(&self) -> CargoResult<()> {
+ for (path, maybe_pkg) in &self.packages.packages {
+ let warnings = match maybe_pkg {
+ MaybePackage::Package(pkg) => pkg.manifest().warnings().warnings(),
+ MaybePackage::Virtual(vm) => vm.warnings().warnings(),
+ };
+ let path = path.join("Cargo.toml");
+ for warning in warnings {
+ if warning.is_critical {
+ let err = failure::format_err!("{}", warning.message);
+ let cx =
+ failure::format_err!("failed to parse manifest at `{}`", path.display());
+ return Err(err.context(cx).into());
+ } else {
+ let msg = if self.root_manifest.is_none() {
+ warning.message.to_string()
+ } else {
+ // In a workspace, it can be confusing where a warning
+ // originated, so include the path.
+ format!("{}: {}", path.display(), warning.message)
+ };
+ self.config.shell().warn(msg)?
+ }
+ }
+ }
+ Ok(())
+ }
+}
+
+impl<'cfg> Packages<'cfg> {
+ fn get(&self, manifest_path: &Path) -> &MaybePackage {
+ self.maybe_get(manifest_path).unwrap()
+ }
+
+ fn maybe_get(&self, manifest_path: &Path) -> Option<&MaybePackage> {
+ self.packages.get(manifest_path.parent().unwrap())
+ }
+
+ fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> {
+ let key = manifest_path.parent().unwrap();
+ match self.packages.entry(key.to_path_buf()) {
+ Entry::Occupied(e) => Ok(e.into_mut()),
+ Entry::Vacant(v) => {
+ let source_id = SourceId::for_path(key)?;
+ let (manifest, _nested_paths) =
+ read_manifest(manifest_path, source_id, self.config)?;
+ Ok(v.insert(match manifest {
+ EitherManifest::Real(manifest) => {
+ MaybePackage::Package(Package::new(manifest, manifest_path))
+ }
+ EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm),
+ }))
+ }
+ }
+ }
+}
+
+impl<'a, 'cfg> Iterator for Members<'a, 'cfg> {
+ type Item = &'a Package;
+
+ fn next(&mut self) -> Option<&'a Package> {
+ loop {
+ let next = self.iter.next().map(|path| self.ws.packages.get(path));
+ match next {
+ Some(&MaybePackage::Package(ref p)) => return Some(p),
+ Some(&MaybePackage::Virtual(_)) => {}
+ None => return None,
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (_, upper) = self.iter.size_hint();
+ (0, upper)
+ }
+}
+
+impl MaybePackage {
+ fn workspace_config(&self) -> &WorkspaceConfig {
+ match *self {
+ MaybePackage::Package(ref p) => p.manifest().workspace_config(),
+ MaybePackage::Virtual(ref vm) => vm.workspace_config(),
+ }
+ }
+}
+
+impl WorkspaceRootConfig {
+ /// Creates a new Intermediate Workspace Root configuration.
+ pub fn new(
+ root_dir: &Path,
+ members: &Option<Vec<String>>,
+ default_members: &Option<Vec<String>>,
+ exclude: &Option<Vec<String>>,
+ ) -> WorkspaceRootConfig {
+ WorkspaceRootConfig {
+ root_dir: root_dir.to_path_buf(),
+ members: members.clone(),
+ default_members: default_members.clone(),
+ exclude: exclude.clone().unwrap_or_default(),
+ }
+ }
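The `(0, upper)` size hint in the `Members` iterator above is worth a note: because the iterator silently skips virtual manifests, only the inner iterator's upper bound survives, and the only honest lower bound is zero. A toy filtering iterator showing the same reasoning (the types are purely illustrative):

```rust
struct Evens<I: Iterator<Item = u32>> {
    inner: I,
}

impl<I: Iterator<Item = u32>> Iterator for Evens<I> {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        loop {
            match self.inner.next() {
                Some(x) if x % 2 == 0 => return Some(x), // kept, like a real package
                Some(_) => {}                            // skipped, like a virtual manifest
                None => return None,
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Everything could be skipped, so the lower bound must drop to 0.
        let (_, upper) = self.inner.size_hint();
        (0, upper)
    }
}
```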
+
+ /// Checks the path against the `excluded` list.
+ ///
+ /// This method does **not** consider the `members` list.
+ fn is_excluded(&self, manifest_path: &Path) -> bool {
+ let excluded = self
+ .exclude
+ .iter()
+ .any(|ex| manifest_path.starts_with(self.root_dir.join(ex)));
+
+ let explicit_member = match self.members {
+ Some(ref members) => members
+ .iter()
+ .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))),
+ None => false,
+ };
+
+ !explicit_member && excluded
+ }
+
+ fn has_members_list(&self) -> bool {
+ self.members.is_some()
+ }
+
+ fn members_paths(&self, globs: &[String]) -> CargoResult<Vec<PathBuf>> {
+ let mut expanded_list = Vec::new();
+
+ for glob in globs {
+ let pathbuf = self.root_dir.join(glob);
+ let expanded_paths = Self::expand_member_path(&pathbuf)?;
+
+ // If glob does not find any valid paths, then put the original
+ // path in the expanded list to maintain backwards compatibility.
+ if expanded_paths.is_empty() {
+ expanded_list.push(pathbuf);
+ } else {
+ expanded_list.extend(expanded_paths);
+ }
+ }
+
+ Ok(expanded_list)
+ }
+
+ fn expand_member_path(path: &Path) -> CargoResult<Vec<PathBuf>> {
+ let path = match path.to_str() {
+ Some(p) => p,
+ None => return Ok(Vec::new()),
+ };
+ let res =
+ glob(path).chain_err(|| failure::format_err!("could not parse pattern `{}`", &path))?;
+ let res = res
+ .map(|p| {
+ p.chain_err(|| failure::format_err!("unable to match path to pattern `{}`", &path))
+ })
+ .collect::<Result<Vec<_>, _>>()?;
+ Ok(res)
+ }
+}
diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs
index 669c5d5bf27..64e1ea2eae5 100644
--- a/src/cargo/lib.rs
+++ b/src/cargo/lib.rs
@@ -1,256 +1,241 @@
-#![deny(unused)]
 #![cfg_attr(test, deny(warnings))]
+// While we're getting used to 2018:
+#![warn(rust_2018_idioms)]
+// Clippy isn't enforced by CI (@alexcrichton isn't a fan).
+#![allow(clippy::blacklisted_name)] // frequently used in tests
+#![allow(clippy::cognitive_complexity)] // large project
+#![allow(clippy::derive_hash_xor_eq)] // there's an intentional incoherence
+#![allow(clippy::explicit_into_iter_loop)] // explicit loops are clearer
+#![allow(clippy::explicit_iter_loop)] // explicit loops are clearer
+#![allow(clippy::identity_op)] // used for vertical alignment
+#![allow(clippy::implicit_hasher)] // large project
+#![allow(clippy::large_enum_variant)] // large project
+#![allow(clippy::new_without_default)] // explicit is maybe clearer
+#![allow(clippy::redundant_closure)] // closures can be less verbose
+#![allow(clippy::redundant_closure_call)] // closures over try catch blocks
+#![allow(clippy::too_many_arguments)] // large project
+#![allow(clippy::type_complexity)] // there's an exceptionally complex type
+#![allow(clippy::wrong_self_convention)] // perhaps `Rc` should be special-cased in Clippy?
+#![warn(clippy::needless_borrow)]
+#![warn(clippy::redundant_clone)]
+// Unit is now interned, and would probably be better as pass-by-copy, but
+// doing so causes a lot of & and * shenanigans that makes the code arguably
+// less clear and harder to read.
+#![allow(clippy::trivially_copy_pass_by_ref)]
+// exhaustively destructuring ensures future fields are handled
+#![allow(clippy::unneeded_field_pattern)]
+
+use std::fmt;
+use std::io;

-#[cfg(test)] extern crate hamcrest;
-#[macro_use] extern crate log;
-extern crate crates_io as registry;
-extern crate curl;
-extern crate docopt;
-extern crate filetime;
-extern crate flate2;
-extern crate git2;
-extern crate glob;
-extern crate libc;
-extern crate libgit2_sys;
-extern crate num_cpus;
-extern crate regex;
-extern crate rustc_serialize;
-extern crate semver;
-extern crate tar;
-extern crate term;
-extern crate threadpool;
-extern crate time;
-extern crate toml;
-extern crate url;
+use failure::Error;
+use log::debug;
+use serde::ser;

-use std::env;
-use std::error::Error;
-use std::io::prelude::*;
-use std::io;
-use rustc_serialize::{Decodable, Encodable};
-use rustc_serialize::json::{self, Json};
-use docopt::Docopt;
+use crate::core::shell::Verbosity::Verbose;
+use crate::core::Shell;
+
+pub use crate::util::errors::Internal;
+pub use crate::util::{CargoResult, CliError, CliResult, Config};

-use core::{Shell, MultiShell, ShellConfig, Verbosity, ColorConfig};
-use core::shell::Verbosity::{Verbose};
-use core::shell::ColorConfig::{Auto};
-use term::color::{BLACK, RED};
+pub const CARGO_ENV: &str = "CARGO";

-pub use util::{CargoError, CliError, CliResult, human, Config, ChainError};
+#[macro_use]
+mod macros;

 pub mod core;
 pub mod ops;
 pub mod sources;
 pub mod util;

-pub fn execute_main<T, U, V>(
-    exec: fn(T, U, &Config) -> CliResult<Option<V>>,
-    options_first: bool,
-    usage: &str)
-    where V: Encodable, T: Decodable, U: Decodable
-{
-    process::<V, _>(|rest, shell| {
-        call_main(exec, shell, usage, rest, options_first)
-    });
+pub struct CommitInfo {
+ pub short_commit_hash: String,
+ pub commit_hash: String,
+ pub commit_date: String,
 }

-pub fn call_main<T, U, V>(
-    exec: fn(T, U, &Config) -> CliResult<Option<V>>,
-    shell: &Config,
-    usage: &str,
-    args: &[String],
-    options_first: bool) -> CliResult<Option<V>>
-    where V: Encodable, T: Decodable, U: Decodable
-{
-    let flags = try!(flags_from_args::<T>(usage, args, options_first));
-    let json = try!(json_from_stdin::<U>());
-
-    exec(flags, json, shell)
+pub struct CfgInfo {
+ // Information about the Git repository we may have been built from.
+ pub commit_info: Option<CommitInfo>,
+ // The release channel we were built for.
+ pub release_channel: String,
 }

-pub fn execute_main_without_stdin<T, V>(
-    exec: fn(T, &Config) -> CliResult<Option<V>>,
-    options_first: bool,
-    usage: &str)
-    where V: Encodable, T: Decodable
-{
-    process::<V, _>(|rest, shell| {
-        call_main_without_stdin(exec, shell, usage, rest, options_first)
-    });
+pub struct VersionInfo {
+ pub major: u8,
+ pub minor: u8,
+ pub patch: u8,
+ pub pre_release: Option<String>,
+ // Information that's only available when we were built with
+ // configure/make, rather than Cargo itself.
+ pub cfg_info: Option<CfgInfo>,
 }

-pub fn call_main_without_stdin<T, V>(
-    exec: fn(T, &Config) -> CliResult<Option<V>>,
-    shell: &Config,
-    usage: &str,
-    args: &[String],
-    options_first: bool) -> CliResult<Option<V>>
-    where V: Encodable, T: Decodable
-{
-    let flags = try!(flags_from_args::<T>(usage, args, options_first));
-    exec(flags, shell)
+impl fmt::Display for VersionInfo {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?;
+ if let Some(channel) = self.cfg_info.as_ref().map(|ci| &ci.release_channel) {
+ if channel != "stable" {
+ write!(f, "-{}", channel)?;
+ let empty = String::new();
+ write!(f, "{}", self.pre_release.as_ref().unwrap_or(&empty))?;
+ }
+ };
+
+ if let Some(ref cfg) = self.cfg_info {
+ if let Some(ref ci) = cfg.commit_info {
+ write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
+ }
+ };
+ Ok(())
+ }
 }

-fn process<V, F>(mut callback: F)
-    where F: FnMut(&[String], &Config) -> CliResult<Option<V>>,
-          V: Encodable
-{
-    let mut config = None;
-    let result = (|| {
-        config = Some(try!(Config::new(shell(Verbose, Auto))));
-        let args: Vec<_> = try!(env::args_os().map(|s| {
-            s.into_string().map_err(|s| {
-                human(format!("invalid unicode in argument: {:?}", s))
-            })
-        }).collect());
-        callback(&args, config.as_ref().unwrap())
-    })();
-    let mut verbose_shell = shell(Verbose, Auto);
-    let mut shell = config.as_ref().map(|s| s.shell());
-    let shell = shell.as_mut().map(|s| &mut **s).unwrap_or(&mut verbose_shell);
-    process_executed(result, shell)
+pub fn print_json<T: ser::Serialize>(obj: &T) {
+ let encoded = serde_json::to_string(&obj).unwrap();
+ println!("{}", encoded);
 }

-pub fn process_executed<T>(result: CliResult<Option<T>>, shell: &mut MultiShell)
-    where T: Encodable
-{
-    match result {
-        Err(e) => handle_error(e, shell),
-        Ok(Some(encodable)) => {
-            let encoded = json::encode(&encodable).unwrap();
-            println!("{}", encoded);
+pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
+ debug!("exit_with_error; err={:?}", err);
+ if let Some(ref err) = err.error {
+ if let Some(clap_err) = err.downcast_ref::<clap::Error>() {
+ clap_err.exit()
 }
-        }
-        Ok(None) => {}
 }
-}

-pub fn shell(verbosity: Verbosity, color_config: ColorConfig) -> MultiShell {
-    let tty = isatty(libc::STDERR_FILENO);
-    let stderr = Box::new(io::stderr());
-
-    let config = ShellConfig { color_config: color_config, tty: tty };
-    let err = Shell::create(stderr, config);
-
-    let tty = isatty(libc::STDOUT_FILENO);
-    let stdout = Box::new(io::stdout());
-
-    let config = ShellConfig { color_config: color_config, tty: tty };
-    let out = Shell::create(stdout, config);
-
-    return MultiShell::new(out, err, verbosity);
+ let CliError {
+ error,
+ exit_code,
+ unknown,
+ } = err;
+ // `exit_code` of 0 means non-fatal error (e.g., docopt version info).
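Assuming the `VersionInfo`/`CfgInfo`/`CommitInfo` definitions above are in scope, the `Display` impl renders a non-stable build roughly as follows (every field value below is made up purely for illustration):

```rust
fn main() {
    let vi = VersionInfo {
        major: 1,
        minor: 39,
        patch: 0,
        pre_release: None,
        cfg_info: Some(CfgInfo {
            commit_info: Some(CommitInfo {
                short_commit_hash: "abc1234".to_string(),           // fabricated
                commit_hash: "abc1234def5678abc1234def".to_string(), // fabricated
                commit_date: "2019-01-01".to_string(),               // fabricated
            }),
            release_channel: "beta".to_string(),
        }),
    };
    // Prints: cargo 1.39.0-beta (abc1234 2019-01-01)
    println!("{}", vi);
}
```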
+ let fatal = exit_code != 0; + + let hide = unknown && shell.verbosity() != Verbose; + + if let Some(error) = error { + if hide { + drop(shell.error("An unknown error occurred")) + } else if fatal { + drop(shell.error(&error)) + } else { + println!("{}", error); + } - #[cfg(unix)] - fn isatty(fd: libc::c_int) -> bool { - unsafe { libc::isatty(fd) != 0 } - } - #[cfg(windows)] - fn isatty(fd: libc::c_int) -> bool { - extern crate kernel32; - extern crate winapi; - unsafe { - let handle = kernel32::GetStdHandle(if fd == libc::STDOUT_FILENO { - winapi::winbase::STD_OUTPUT_HANDLE - } else { - winapi::winbase::STD_ERROR_HANDLE - }); - let mut out = 0; - kernel32::GetConsoleMode(handle, &mut out) != 0 + if !handle_cause(&error, shell) || hide { + drop(writeln!( + shell.err(), + "\nTo learn more, run the command again \ + with --verbose." + )); } } -} -// `output` print variant error strings to either stderr or stdout. -// For fatal errors, print to stderr; -// and for others, e.g. docopt version info, print to stdout. -fn output(err: String, shell: &mut MultiShell, fatal: bool) { - let std_shell = if fatal {shell.err()} else {shell.out()}; - let color = if fatal {RED} else {BLACK}; - let _ = std_shell.say(err, color); + std::process::exit(exit_code) } -pub fn handle_error(err: CliError, shell: &mut MultiShell) { +pub fn handle_error(err: &failure::Error, shell: &mut Shell) { debug!("handle_error; err={:?}", err); - let CliError { error, exit_code, unknown } = err; - let fatal = exit_code != 0; // exit_code == 0 is non-fatal error + let _ignored_result = shell.error(err); + handle_cause(err, shell); +} - let hide = unknown && shell.get_verbose() != Verbose; - if hide { - let _ = shell.err().say("An unknown error occurred", RED); - } else { - output(error.to_string(), shell, fatal); +fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool { + fn print(error: &str, shell: &mut Shell) { + drop(writeln!(shell.err(), "\nCaused by:")); + drop(writeln!(shell.err(), " {}", error)); } - if !handle_cause(&error, shell) || hide { - let _ = shell.err().say("\nTo learn more, run the command again \ - with --verbose.".to_string(), BLACK); + + fn print_stderror_causes(error: &dyn std::error::Error, shell: &mut Shell) { + let mut cur = std::error::Error::source(error); + while let Some(err) = cur { + print(&err.to_string(), shell); + cur = std::error::Error::source(err); + } } - std::process::exit(exit_code); -} + let verbose = shell.verbosity(); -fn handle_cause(mut cargo_err: &CargoError, shell: &mut MultiShell) -> bool { - let verbose = shell.get_verbose(); - let mut err; - loop { - cargo_err = match cargo_err.cargo_cause() { - Some(cause) => cause, - None => { err = cargo_err.cause(); break } - }; - if verbose != Verbose && !cargo_err.is_human() { return false } - print(cargo_err.to_string(), shell); - } - loop { - let cause = match err { Some(err) => err, None => return true }; - if verbose != Verbose { return false } - print(cause.to_string(), shell); - err = cause.cause(); - } + // The first error has already been printed to the shell. + for err in cargo_err.iter_causes() { + // If we're not in verbose mode then print remaining errors until one + // marked as `Internal` appears. 
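The loop that follows implements exactly the comment above: print each cause, but outside verbose mode stop at the first internal (implementation-detail) error. As a self-contained illustration of the idea using only `std` (Cargo itself goes through the `failure` crate here, and its real `Internal` marker type lives in `util::errors`):

```rust
use std::error::Error;
use std::fmt;

#[derive(Debug)]
struct Internal(String); // stand-in marker for "internal" errors

impl fmt::Display for Internal {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Error for Internal {}

/// Print each cause; outside verbose mode, stop at the first `Internal`
/// error and return `false` so the caller can suggest `--verbose`.
fn print_causes(top: &dyn Error, verbose: bool) -> bool {
    let mut cur = top.source();
    while let Some(err) = cur {
        if !verbose && err.downcast_ref::<Internal>().is_some() {
            return false;
        }
        eprintln!("\nCaused by:\n  {}", err);
        cur = err.source();
    }
    true
}
```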
+ if verbose != Verbose && err.downcast_ref::<Internal>().is_some() {
+ return false;
+ }

-    fn print(error: String, shell: &mut MultiShell) {
-        let _ = shell.err().say("\nCaused by:", BLACK);
-        let _ = shell.err().say(format!("  {}", error), BLACK);
+ print(&err.to_string(), shell);
+
+ // Using the `failure` crate currently means that when using
+ // `iter_causes` we're only iterating over the `failure` causes, but
+ // this doesn't include the causes from the standard library `Error`
+ // trait. We don't have a great way of getting an `&dyn Error` from a
+ // `&dyn Fail`, so we currently just special case a few errors that are
+ // known to maybe have causes and we try to print them here.
+ //
+ // Note that this isn't an exhaustive match since causes for
+ // `std::error::Error` aren't the most common thing in the world.
+ if let Some(io) = err.downcast_ref::<io::Error>() {
+ print_stderror_causes(io, shell);
+ }
 }

-pub fn version() -> String {
-    format!("cargo {}", match option_env!("CFG_VERSION") {
-        Some(s) => s.to_string(),
-        None => format!("{}.{}.{}{}",
-                        env!("CARGO_PKG_VERSION_MAJOR"),
-                        env!("CARGO_PKG_VERSION_MINOR"),
-                        env!("CARGO_PKG_VERSION_PATCH"),
-                        option_env!("CARGO_PKG_VERSION_PRE").unwrap_or(""))
-    })
+ true
 }

-fn flags_from_args<'a, T>(usage: &str, args: &[String],
-                          options_first: bool) -> CliResult<T>
-    where T: Decodable
-{
-    let docopt = Docopt::new(usage).unwrap()
-        .options_first(options_first)
-        .argv(args.iter().map(|s| &s[..]))
-        .help(true)
-        .version(Some(version()));
-    docopt.decode().map_err(|e| {
-        let code = if e.fatal() {1} else {0};
-        CliError::from_error(human(e.to_string()), code)
-    })
-}
-
-fn json_from_stdin<T: Decodable>() -> CliResult<T> {
-    let mut reader = io::stdin();
-    let mut input = String::new();
-    try!(reader.read_to_string(&mut input).map_err(|_| {
-        CliError::new("Standard in did not exist or was not UTF-8", 1)
-    }));
-
-    let json = try!(Json::from_str(&input).map_err(|_| {
-        CliError::new("Could not parse standard in as JSON", 1)
-    }));
-    let mut decoder = json::Decoder::new(json);
+pub fn version() -> VersionInfo {
+ macro_rules! option_env_str {
+ ($name:expr) => {
+ option_env!($name).map(|s| s.to_string())
+ };
+ }

-    Decodable::decode(&mut decoder).map_err(|_| {
-        CliError::new("Could not process standard in as input", 1)
-    })
+ // So this is pretty horrible...
+ // There are two versions at play here:
+ // - version of cargo-the-binary, which you see when you type `cargo --version`
+ // - version of cargo-the-library, which you download from crates.io for use
+ // in your packages.
+ //
+ // We want to make the `binary` version the same as the corresponding Rust/rustc release.
+ // At the same time, we want to keep the library version at `0.x`, because Cargo as
+ // a library is (and probably will always be) unstable.
+ //
+ // Historically, Cargo used the same version number for both the binary and the library.
+ // Specifically, rustc 1.x.z was paired with cargo 0.x+1.w.
+ // We continue to use this scheme for the library, but transform it to 1.x.w for the purposes
+ // of `cargo --version`.
+ let major = 1;
+ let minor = env!("CARGO_PKG_VERSION_MINOR").parse::<u8>().unwrap() - 1;
+ let patch = env!("CARGO_PKG_VERSION_PATCH").parse::<u8>().unwrap();
+
+ match option_env!("CFG_RELEASE_CHANNEL") {
+ // We have environment variables set up from configure/make.
+ Some(_) => {
+ let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo {
+ commit_hash: s.to_string(),
+ short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
+ commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
+ });
+ VersionInfo {
+ major,
+ minor,
+ patch,
+ pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
+ cfg_info: Some(CfgInfo {
+ release_channel: option_env_str!("CFG_RELEASE_CHANNEL").unwrap(),
+ commit_info,
+ }),
+ }
+ }
+ // We are being compiled by Cargo itself.
+ None => VersionInfo {
+ major,
+ minor,
+ patch,
+ pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
+ cfg_info: None,
+ },
+ }
+}
diff --git a/src/cargo/macros.rs b/src/cargo/macros.rs
new file mode 100644
index 00000000000..3ebf3b37f67
--- /dev/null
+++ b/src/cargo/macros.rs
@@ -0,0 +1,49 @@
+use std::fmt;
+
+macro_rules! compact_debug {
+ (
+ impl fmt::Debug for $ty:ident {
+ fn fmt(&$this:ident, f: &mut fmt::Formatter) -> fmt::Result {
+ let (default, default_name) = $e:expr;
+ [debug_the_fields($($field:ident)*)]
+ }
+ }
+ ) => (
+
+ impl fmt::Debug for $ty {
+ fn fmt(&$this, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Try printing a pretty version where we collapse as many fields as
+ // possible, indicating that they're equivalent to a function call
+ // that's hopefully enough to indicate what each value is without
+ // actually dumping everything so verbosely.
+ let mut s = f.debug_struct(stringify!($ty));
+ let (default, default_name) = $e;
+ let mut any_default = false;
+
+ // Exhaustively match so when fields are added we get a compile
+ // failure
+ let $ty { $($field),* } = $this;
+ $(
+ if *$field == default.$field {
+ any_default = true;
+ } else {
+ s.field(stringify!($field), $field);
+ }
+ )*
+
+ if any_default {
+ s.field("..", &crate::macros::DisplayAsDebug(default_name));
+ }
+ s.finish()
+ }
+ }
+ )
+}
+
+pub struct DisplayAsDebug<T>(pub T);
+
+impl<T: fmt::Display> fmt::Debug for DisplayAsDebug<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0, f)
+ }
+}
diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs
index 03b0575556d..af15ab26525 100644
--- a/src/cargo/ops/cargo_clean.rs
+++ b/src/cargo/ops/cargo_clean.rs
@@ -1,72 +1,130 @@
-use std::default::Default;
+use std::collections::HashMap;
 use std::fs;
-use std::io::prelude::*;
 use std::path::Path;

-use core::{PackageSet, Profiles, Profile};
-use core::source::{Source, SourceMap};
-use sources::PathSource;
-use util::{CargoResult, human, ChainError, Config};
-use ops::{self, Layout, Context, BuildConfig, Kind};
+use crate::core::compiler::UnitInterner;
+use crate::core::compiler::{BuildConfig, BuildContext, CompileMode, Context, Kind};
+use crate::core::profiles::UnitFor;
+use crate::core::Workspace;
+use crate::ops;
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::paths;
+use crate::util::Config;

 pub struct CleanOptions<'a> {
-    pub spec: Option<&'a str>,
-    pub target: Option<&'a str>,
     pub config: &'a Config,
+ /// A list of packages to clean. If empty, everything is cleaned.
+ pub spec: Vec<String>,
+ /// The target arch triple to clean, or None for the host arch
+ pub target: Option<String>,
+ /// Whether to clean the release directory
+ pub release: bool,
+ /// Whether to just clean the doc directory
+ pub doc: bool,
 }

-/// Cleans the project from build artifacts.
-pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { - let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), - opts.config)); - try!(src.update()); - let root = try!(src.root_package()); - let target_dir = opts.config.target_dir(&root); +/// Cleans the package's build artifacts. +pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { + let mut target_dir = ws.target_dir(); + let config = ws.config(); - // If we have a spec, then we need to delete some package,s otherwise, just + // If the doc option is set, we just want to delete the doc directory. + if opts.doc { + target_dir = target_dir.join("doc"); + return rm_rf(&target_dir.into_path_unlocked(), config); + } + + // If the release option is set, we set target to release directory + if opts.release { + target_dir = target_dir.join("release"); + } + + // If we have a spec, then we need to delete some packages, otherwise, just // remove the whole target directory and be done with it! - let spec = match opts.spec { - Some(spec) => spec, - None => return rm_rf(&target_dir), - }; + // + // Note that we don't bother grabbing a lock here as we're just going to + // blow it all away anyway. + if opts.spec.is_empty() { + return rm_rf(&target_dir.into_path_unlocked(), config); + } + + let (packages, resolve) = ops::resolve_ws(ws)?; + + let profiles = ws.profiles(); + let interner = UnitInterner::new(); + let mut build_config = BuildConfig::new(config, Some(1), &opts.target, CompileMode::Build)?; + build_config.release = opts.release; + let bcx = BuildContext::new( + ws, + &resolve, + &packages, + opts.config, + &build_config, + profiles, + &interner, + HashMap::new(), + )?; + let mut units = Vec::new(); - // Load the lockfile (if one's available), and resolve spec to a pkgid - let lockfile = root.root().join("Cargo.lock"); - let source_id = root.package_id().source_id(); - let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) { - Some(resolve) => resolve, - None => return Err(human("A Cargo.lock must exist before cleaning")) - }; - let pkgid = try!(resolve.query(spec)); + for spec in opts.spec.iter() { + // Translate the spec to a Package + let pkgid = resolve.query(spec)?; + let pkg = packages.get_one(pkgid)?; - // Translate the PackageId to a Package - let pkg = { - let mut source = pkgid.source_id().load(opts.config); - try!(source.update()); - (try!(source.get(&[pkgid.clone()]))).into_iter().next().unwrap() - }; + // Generate all relevant `Unit` targets for this package + for target in pkg.targets() { + for kind in [Kind::Host, Kind::Target].iter() { + for mode in CompileMode::all_modes() { + for unit_for in UnitFor::all_values() { + let profile = if mode.is_run_custom_build() { + profiles.get_profile_run_custom_build(&profiles.get_profile( + pkg.package_id(), + ws.is_member(pkg), + *unit_for, + CompileMode::Build, + opts.release, + )) + } else { + profiles.get_profile( + pkg.package_id(), + ws.is_member(pkg), + *unit_for, + *mode, + opts.release, + ) + }; + units.push(bcx.units.intern(pkg, target, profile, *kind, *mode)); + } + } + } + } + } + + let mut cx = Context::new(config, &bcx)?; + cx.prepare_units(None, &units)?; - // Create a compilation context to have access to information like target - // filenames and such - let srcs = SourceMap::new(); - let pkgs = PackageSet::new(&[]); - let profiles = Profiles::default(); - let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config, - Layout::at(target_dir), - None, &pkg, BuildConfig::default(), - 
&profiles)); + for unit in units.iter() { + if unit.mode.is_doc() || unit.mode.is_doc_test() { + // Cleaning individual rustdoc crates is currently not supported. + // For example, the search index would need to be rebuilt to fully + // remove it (otherwise you're left with lots of broken links). + // Doc tests produce no output. + continue; + } + rm_rf(&cx.files().fingerprint_dir(unit), config)?; + if unit.target.is_custom_build() { + if unit.mode.is_run_custom_build() { + rm_rf(&cx.files().build_script_out_dir(unit), config)?; + } else { + rm_rf(&cx.files().build_script_dir(unit), config)?; + } + continue; + } - // And finally, clean everything out! - for target in pkg.targets().iter() { - // TODO: `cargo clean --release` - let layout = Layout::new(opts.config, &root, opts.target, "debug"); - try!(rm_rf(&layout.fingerprint(&pkg))); - let profiles = [Profile::default_dev(), Profile::default_test()]; - for profile in profiles.iter() { - for filename in try!(cx.target_filenames(&pkg, target, profile, - Kind::Target)).iter() { - try!(rm_rf(&layout.dest().join(&filename))); - try!(rm_rf(&layout.deps().join(&filename))); + for output in cx.outputs(unit)?.iter() { + rm_rf(&output.path, config)?; + if let Some(ref dst) = output.hardlink { + rm_rf(dst, config)?; } } } @@ -74,16 +132,20 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { Ok(()) } -fn rm_rf(path: &Path) -> CargoResult<()> { +fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> { let m = fs::metadata(path); if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { - try!(fs::remove_dir_all(path).chain_error(|| { - human("could not remove build directory") - })); + config + .shell() + .verbose(|shell| shell.status("Removing", path.display()))?; + paths::remove_dir_all(path) + .chain_err(|| failure::format_err!("could not remove build directory"))?; } else if m.is_ok() { - try!(fs::remove_file(path).chain_error(|| { - human("failed to remove build artifact") - })); + config + .shell() + .verbose(|shell| shell.status("Removing", path.display()))?; + paths::remove_file(path) + .chain_err(|| failure::format_err!("failed to remove build artifact"))?; } Ok(()) } diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 74d0b09423e..653b1a6e8e0 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -1,468 +1,939 @@ +//! The Cargo "compile" operation. //! -//! Cargo compile currently does the following steps: +//! This module contains the entry point for starting the compilation process +//! for commands like `build`, `test`, `doc`, `rustc`, etc. //! -//! All configurations are already injected as environment variables via the -//! main cargo command +//! The `compile` function will do all the work to compile a workspace. A +//! rough outline is: //! -//! 1. Read the manifest -//! 2. Shell out to `cargo-resolve` with a list of dependencies and sources as -//! stdin -//! -//! a. Shell out to `--do update` and `--do list` for each source -//! b. Resolve dependencies and return a list of name/version/source -//! -//! 3. Shell out to `--do download` for each source -//! 4. Shell out to `--do get` for each source, and build up the list of paths -//! to pass to rustc -L -//! 5. Call `cargo-rustc` with the results of the resolver zipped together with -//! the results of the `get` -//! -//! a. Topologically sort the dependencies -//! b. Compile each dependency in order, passing in the -L's pointing at each -//! previously compiled dependency -//! 
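For reference, the `rm_rf` helper introduced above boils down to a file-or-directory removal with context attached to failures and a verbose status line. A dependency-free sketch of the core logic, using plain `std::io::Result` instead of Cargo's `CargoResult`/`chain_err` machinery:

```rust
use std::fs;
use std::io;
use std::path::Path;

// Remove a file or an entire directory tree, whichever `path` is.
fn rm_rf(path: &Path) -> io::Result<()> {
    match fs::metadata(path) {
        Ok(m) if m.is_dir() => fs::remove_dir_all(path),
        Ok(_) => fs::remove_file(path),
        Err(_) => Ok(()), // nothing there: nothing to remove
    }
}
```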
-
-use std::collections::HashMap;
-use std::default::Default;
-use std::path::{Path, PathBuf};
+//! - Resolve the dependency graph (see `ops::resolve`).
+//! - Download any packages needed (see `PackageSet`).
+//! - Generate a list of top-level "units" of work for the targets the user
+//!   requested on the command-line. Each `Unit` corresponds to a compiler
+//!   invocation. This is done in this module (`generate_targets`).
+//! - Create a `Context` which will perform the following steps:
+//!   - Build the graph of `Unit` dependencies (see
+//!     `core::compiler::context::unit_dependencies`).
+//!   - Prepare the `target` directory (see `Layout`).
+//!   - Create a job queue (see `JobQueue`). The queue checks the
+//!     fingerprint of each `Unit` to determine if it should run or be
+//!     skipped.
+//!   - Execute the queue. Each leaf in the queue's dependency graph is
+//!     executed, and then removed from the graph when finished. This
+//!     repeats until the queue is empty.
+
+use std::collections::{BTreeSet, HashMap, HashSet};
+use std::iter::FromIterator;
+use std::path::PathBuf;
 use std::sync::Arc;

-use core::registry::PackageRegistry;
-use core::{Source, SourceId, PackageSet, Package, Target, PackageId};
-use core::{Profile, TargetKind};
-use core::resolver::Method;
-use ops::{self, BuildOutput, ExecEngine};
-use sources::{PathSource};
-use util::config::{ConfigValue, Config};
-use util::{CargoResult, internal, human, ChainError, profile};
-
-/// Contains informations about how a package should be compiled.
+use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
+use crate::core::compiler::{CompileMode, Kind, Unit};
+use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
+use crate::core::profiles::{Profiles, UnitFor};
+use crate::core::resolver::{Resolve, ResolveOpts};
+use crate::core::{Package, Target};
+use crate::core::{PackageId, PackageIdSpec, TargetKind, Workspace};
+use crate::ops;
+use crate::util::config::Config;
+use crate::util::{closest_msg, profile, CargoResult, Platform};
+
+/// Contains information about how a package should be compiled.
+#[derive(Debug)]
 pub struct CompileOptions<'a> {
     pub config: &'a Config,
-    /// Number of concurrent jobs to use.
-    pub jobs: Option<u32>,
-    /// The target platform to compile for (example: `i686-unknown-linux-gnu`).
-    pub target: Option<&'a str>,
+    /// Configuration information for a rustc build
+    pub build_config: BuildConfig,
     /// Extra features to build for the root package
-    pub features: &'a [String],
+    pub features: Vec<String>,
+    /// Flag whether all available features should be built for the root package
+    pub all_features: bool,
     /// Flag if the default feature should be built for the root package
     pub no_default_features: bool,
-    /// Root package to build (if None it's the current one)
-    pub spec: Option<&'a str>,
+    /// A set of packages to build.
+    pub spec: Packages,
     /// Filter to apply to the root package to select which targets will be
     /// built.
-    pub filter: CompileFilter<'a>,
-    /// Engine which drives compilation
-    pub exec_engine: Option<Arc<Box<ExecEngine>>>,
-    /// Whether this is a release build or not
-    pub release: bool,
-    /// Mode for this compile.
-    pub mode: CompileMode,
+    pub filter: CompileFilter,
+    /// Extra arguments to be passed to rustdoc (single target only)
+    pub target_rustdoc_args: Option<Vec<String>>,
     /// The specified target will be compiled with all the available arguments,
     /// note that this only accounts for the *final* invocation of rustc
-    pub target_rustc_args: Option<&'a [String]>,
-}
-
-#[derive(Clone, Copy, PartialEq)]
-pub enum CompileMode {
-    Test,
-    Build,
-    Bench,
-    Doc { deps: bool },
+    pub target_rustc_args: Option<Vec<String>>,
+    /// Extra arguments passed to all selected targets for rustdoc.
+    pub local_rustdoc_args: Option<Vec<String>>,
+    /// The directory to copy final artifacts to. Note that even if `out_dir` is
+    /// set, a copy of artifacts still can be found at `target/(debug|release)`
+    /// as usual.
+    // Note that, although the cmd-line flag name is `out-dir`, in code we use
+    // `export_dir`, to avoid confusion with out dir at `target/debug/deps`.
+    pub export_dir: Option<PathBuf>,
 }

-pub enum CompileFilter<'a> {
-    Everything,
-    Only {
-        lib: bool,
-        bins: &'a [String],
-        examples: &'a [String],
-        tests: &'a [String],
-        benches: &'a [String],
+impl<'a> CompileOptions<'a> {
+ pub fn new(config: &'a Config, mode: CompileMode) -> CargoResult<CompileOptions<'a>> {
+ Ok(CompileOptions {
+ config,
+ build_config: BuildConfig::new(config, None, &None, mode)?,
+ features: Vec::new(),
+ all_features: false,
+ no_default_features: false,
+ spec: ops::Packages::Packages(Vec::new()),
+ filter: CompileFilter::Default {
+ required_features_filterable: false,
+ },
+ target_rustdoc_args: None,
+ target_rustc_args: None,
+ local_rustdoc_args: None,
+ export_dir: None,
+ })
     }
 }

-pub fn compile<'a>(manifest_path: &Path,
-                   options: &CompileOptions<'a>)
-                   -> CargoResult<ops::Compilation> {
-    debug!("compile; manifest-path={}", manifest_path.display());
-
-    let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(),
-                                               options.config));
-    try!(source.update());
-
-    // TODO: Move this into PathSource
-    let package = try!(source.root_package());
-    debug!("loaded package; package={}", package);
-
-    for key in package.manifest().warnings().iter() {
-        try!(options.config.shell().warn(key))
-    }
-    compile_pkg(&package, Some(Box::new(source)), options)
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum Packages {
+ Default,
+ All,
+ OptOut(Vec<String>),
+ Packages(Vec<String>),
 }

-pub fn compile_pkg<'a>(package: &Package,
-                       source: Option<Box<Source + 'static>>,
-                       options: &CompileOptions<'a>)
-                       -> CargoResult<ops::Compilation> {
-    let CompileOptions { config, jobs, target, spec, features,
-                         no_default_features, release, mode,
-                         ref filter, ref exec_engine,
-                         ref target_rustc_args } = *options;
-
-    let target = target.map(|s| s.to_string());
-    let features = features.iter().flat_map(|s| {
-        s.split(' ')
-    }).map(|s| s.to_string()).collect::<Vec<String>>();
-
-    if spec.is_some() && (no_default_features || features.len() > 0) {
-        return Err(human("features cannot be modified when the main package \
-                          is not being built"))
+impl Packages {
+ pub fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> CargoResult<Packages> {
+ Ok(match (all, exclude.len(), package.len()) {
+ (false, 0, 0) => Packages::Default,
+ (false, 0, _) => Packages::Packages(package),
+ (false, _, _) => failure::bail!("--exclude can only be used together with --all"),
+ (true, 0, _) => Packages::All,
+ (true, _, _) => Packages::OptOut(exclude),
+ })
     }
-    if jobs == Some(0) {
-        return Err(human("jobs must be at least 1"))
+
+ pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult<Vec<PackageIdSpec>> {
+ let specs = match self {
+ Packages::All => ws
+ .members()
+ .map(Package::package_id)
.map(PackageIdSpec::from_package_id)
+ .collect(),
+ Packages::OptOut(opt_out) => {
+ let mut opt_out = BTreeSet::from_iter(opt_out.iter().cloned());
+ let packages = ws
+ .members()
+ .filter(|pkg| !opt_out.remove(pkg.name().as_str()))
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .collect();
+ if !opt_out.is_empty() {
+ ws.config().shell().warn(format!(
+ "excluded package(s) {} not found in workspace `{}`",
+ opt_out
+ .iter()
+ .map(|x| x.as_ref())
+ .collect::<Vec<&str>>()
+ .join(", "),
+ ws.root().display(),
+ ))?;
+ }
+ packages
+ }
+ Packages::Packages(packages) if packages.is_empty() => {
+ vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]
+ }
+ Packages::Packages(packages) => packages
+ .iter()
+ .map(|p| PackageIdSpec::parse(p))
+ .collect::<CargoResult<Vec<_>>>()?,
+ Packages::Default => ws
+ .default_members()
+ .map(Package::package_id)
+ .map(PackageIdSpec::from_package_id)
+ .collect(),
+ };
+ if specs.is_empty() {
+ if ws.is_virtual() {
+ failure::bail!(
+ "manifest path `{}` contains no package: The manifest is virtual, \
+ and the workspace has no members.",
+ ws.root().display()
+ )
+ }
+ failure::bail!("no packages to compile")
+ }
+ Ok(specs)
+ }

-    let override_ids = try!(source_ids_from_config(config, package.root()));

+ pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult<Vec<&'ws Package>> {
+ let packages: Vec<_> = match self {
+ Packages::Default => ws.default_members().collect(),
+ Packages::All => ws.members().collect(),
+ Packages::OptOut(opt_out) => ws
+ .members()
+ .filter(|pkg| !opt_out.iter().any(|name| pkg.name().as_str() == name))
+ .collect(),
+ Packages::Packages(packages) => packages
+ .iter()
+ .map(|name| {
+ ws.members()
+ .find(|pkg| pkg.name().as_str() == name)
+ .ok_or_else(|| {
+ failure::format_err!(
+ "package `{}` is not a member of the workspace",
+ name
+ )
+ })
+ })
+ .collect::<CargoResult<Vec<_>>>()?,
+ };
+ Ok(packages)
+ }

-    let (packages, resolve_with_overrides, sources) = {
-        let mut registry = PackageRegistry::new(config);
-        if let Some(source) = source {
-            registry.preload(package.package_id().source_id(), source);
-        } else {
-            try!(registry.add_sources(&[package.package_id().source_id()
-                                               .clone()]));

+ /// Returns whether or not the user needs to pass a `-p` flag to target a
+ /// specific package in the workspace.
+ pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool {
+ match self {
+ Packages::Default => ws.default_members().count() > 1,
+ Packages::All => ws.members().count() > 1,
+ Packages::Packages(_) => true,
+ Packages::OptOut(_) => true,
         }
     }
+}

-        // First, resolve the package's *listed* dependencies, as well as
-        // downloading and updating all remotes and such.
-        let resolve = try!(ops::resolve_pkg(&mut registry, package));

+#[derive(Debug, PartialEq, Eq)]
+pub enum LibRule {
+ /// Include the library, fail if not present
+ True,
+ /// Include the library if present
+ Default,
+ /// Exclude the library
+ False,
+}

-        // Second, resolve with precisely what we're doing. Filter out
-        // transitive dependencies if necessary, specify features, handle
-        // overrides, etc.
-        let _p = profile::start("resolving w/ overrides...");

+#[derive(Debug)]
+pub enum FilterRule {
+ All,
+ Just(Vec<String>),
+}

-        try!(registry.add_overrides(override_ids));

+#[derive(Debug)]
+pub enum CompileFilter {
+ Default {
+ /// Flag whether targets can be safely skipped when required-features are not satisfied.
+ required_features_filterable: bool,
+ },
+ Only {
+ all_targets: bool,
+ lib: LibRule,
+ bins: FilterRule,
+ examples: FilterRule,
+ tests: FilterRule,
+ benches: FilterRule,
+ },
+}

-        let method = Method::Required {
-            dev_deps: true, // TODO: remove this option?
-            features: &features,
-            uses_default_features: !no_default_features,
-        };

+pub fn compile<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions<'a>,
+) -> CargoResult<Compilation<'a>> {
+ let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
+ compile_with_exec(ws, options, &exec)
+}

-        let resolved_with_overrides =
-                try!(ops::resolve_with_previous(&mut registry, package, method,
-                                                Some(&resolve), None));

+/// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build
+/// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through.
+pub fn compile_with_exec<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions<'a>,
+ exec: &Arc<dyn Executor>,
+) -> CargoResult<Compilation<'a>> {
+ ws.emit_warnings()?;
+ compile_ws(ws, options, exec)
+}

-        let req: Vec<PackageId> = resolved_with_overrides.iter().map(|r| {
-            r.clone()
-        }).collect();
-        let packages = try!(registry.get(&req).chain_error(|| {
-            human("Unable to get packages from source")
-        }));

+pub fn compile_ws<'a>(
+ ws: &Workspace<'a>,
+ options: &CompileOptions<'a>,
+ exec: &Arc<dyn Executor>,
+) -> CargoResult<Compilation<'a>> {
+ let CompileOptions {
+ config,
+ ref build_config,
+ ref spec,
+ ref features,
+ all_features,
+ no_default_features,
+ ref filter,
+ ref target_rustdoc_args,
+ ref target_rustc_args,
+ ref local_rustdoc_args,
+ ref export_dir,
+ } = *options;
+
+ match build_config.mode {
+ CompileMode::Test
+ | CompileMode::Build
+ | CompileMode::Check { .. }
+ | CompileMode::Bench
+ | CompileMode::RunCustomBuild => {
+ if std::env::var("RUST_FLAGS").is_ok() {
+ config.shell().warn(
+ "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?",
+ )?;
+ }
+ }
+ CompileMode::Doc { .. } | CompileMode::Doctest => {
+ if std::env::var("RUSTDOC_FLAGS").is_ok() {
+ config.shell().warn(
+ "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?"
+ )?;
+ }
+ }
+ }

-        (packages, resolved_with_overrides, registry.move_sources())

+ let default_arch_kind = if build_config.requested_target.is_some() {
+ Kind::Target
+ } else {
+ Kind::Host
     };

-    let pkgid = match spec {
-        Some(spec) => try!(resolve_with_overrides.query(spec)),
-        None => package.package_id(),
-    };
-    let to_build = packages.iter().find(|p| p.package_id() == pkgid).unwrap();
-    let targets = try!(generate_targets(to_build, mode, filter, release));
-

+ let specs = spec.to_package_id_specs(ws)?;
+ let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode);
+ let opts = ResolveOpts::new(dev_deps, features, all_features, !no_default_features);
+ let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?;
+ let (packages, resolve_with_overrides) = resolve;
+
+ let to_build_ids = specs
+ .iter()
+ .map(|s| s.query(resolve_with_overrides.iter()))
+ .collect::<CargoResult<Vec<PackageId>>>()?;
+ let mut to_builds = packages.get_many(to_build_ids)?;
+
+ // The ordering here affects some error messages coming out of cargo, so
+ // let's be test and CLI friendly by always printing in the same order if
+ // there's an error.
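The three entry points above differ only in how much they assume: `compile` supplies the pass-through `DefaultExecutor`, while `compile_with_exec` lets a caller inject its own hook around compiler invocations. A compressed sketch of that shape (the trait and signatures here are simplified stand-ins, not Cargo's real `Executor` API):

```rust
use std::sync::Arc;

trait Executor: Send + Sync {
    // A real implementation would receive the full rustc invocation.
    fn exec(&self, cmd: &str) {
        println!("running: {}", cmd);
    }
}

struct DefaultExecutor;
impl Executor for DefaultExecutor {}

fn compile_with_exec(exec: &Arc<dyn Executor>) {
    exec.exec("rustc src/lib.rs");
}

fn compile() {
    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
    compile_with_exec(&exec);
}
```

Keeping the executor behind `Arc<dyn Executor>` is what allows external tools to reuse Cargo's planning while substituting their own build step.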
+ to_builds.sort_by_key(|p| p.package_id()); + + for pkg in to_builds.iter() { + pkg.manifest().print_teapot(ws.config()); + + if build_config.mode.is_any_test() + && !ws.is_member(pkg) + && pkg.dependencies().iter().any(|dep| !dep.is_transitive()) + { + failure::bail!( + "package `{}` cannot be tested because it requires dev-dependencies \ + and is not a member of the workspace", + pkg.name() + ); } - Some(_) => { - return Err(human("extra arguments to `rustc` can only be passed to \ - one target, consider filtering\nthe package by \ - passing e.g. `--lib` or `--bin NAME` to specify \ - a single target")) - } - None => None, + } + + let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) { + (&Some(ref args), _) => (Some(args.clone()), "rustc"), + (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"), + _ => (None, ""), }; - let targets = target_with_args.as_ref().map(|&(t, ref p)| vec![(t, p)]) - .unwrap_or(targets); + if extra_args.is_some() && to_builds.len() != 1 { + panic!( + "`{}` should not accept multiple `-p` flags", + extra_args_name + ); + } + + let profiles = ws.profiles(); + profiles.validate_packages(&mut config.shell(), &packages)?; + + let interner = UnitInterner::new(); + let mut bcx = BuildContext::new( + ws, + &resolve_with_overrides, + &packages, + config, + build_config, + profiles, + &interner, + HashMap::new(), + )?; + let units = generate_targets( + ws, + profiles, + &to_builds, + filter, + default_arch_kind, + &resolve_with_overrides, + &bcx, + )?; + + if let Some(args) = extra_args { + if units.len() != 1 { + failure::bail!( + "extra arguments to `{}` can only be passed to one \ + target, consider filtering\nthe package by passing, \ + e.g., `--lib` or `--bin NAME` to specify a single target", + extra_args_name + ); + } + bcx.extra_compiler_args.insert(units[0], args); + } + if let Some(args) = local_rustdoc_args { + for unit in &units { + if unit.mode.is_doc() || unit.mode.is_doc_test() { + bcx.extra_compiler_args.insert(*unit, args.clone()); + } + } + } let ret = { let _p = profile::start("compiling"); - let mut build_config = try!(scrape_build_config(config, jobs, target)); - build_config.exec_engine = exec_engine.clone(); - build_config.release = release; - if let CompileMode::Doc { deps } = mode { - build_config.doc_all = deps; + let cx = Context::new(config, &bcx)?; + cx.compile(&units, export_dir.clone(), exec)? 
+ };
+
+ Ok(ret)
+}
+
+impl FilterRule {
+ pub fn new(targets: Vec<String>, all: bool) -> FilterRule {
+ if all {
+ FilterRule::All
+ } else {
+ FilterRule::Just(targets)
+ }
+ }

-        try!(ops::compile_targets(&targets, to_build,
-                                  &PackageSet::new(&packages),
-                                  &resolve_with_overrides,
-                                  &sources,
-                                  config,
-                                  build_config,
-                                  to_build.manifest().profiles()))
-    };

+ pub fn none() -> FilterRule {
+ FilterRule::Just(Vec::new())
+ }

-    return Ok(ret);

+ fn matches(&self, target: &Target) -> bool {
+ match *self {
+ FilterRule::All => true,
+ FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()),
+ }
+ }
+
+ fn is_specific(&self) -> bool {
+ match *self {
+ FilterRule::All => true,
+ FilterRule::Just(ref targets) => !targets.is_empty(),
+ }
+ }
+
+ pub fn try_collect(&self) -> Option<Vec<String>> {
+ match *self {
+ FilterRule::All => None,
+ FilterRule::Just(ref targets) => Some(targets.clone()),
+ }
+ }
+}

-impl<'a> CompileFilter<'a> {
-    pub fn new(lib_only: bool,
-               bins: &'a [String],
-               tests: &'a [String],
-               examples: &'a [String],
-               benches: &'a [String]) -> CompileFilter<'a> {
-        if lib_only || !bins.is_empty() || !tests.is_empty() ||
-           !examples.is_empty() || !benches.is_empty() {
+impl CompileFilter {
+ /// Construct a CompileFilter from raw command line arguments.
+ pub fn from_raw_arguments(
+ lib_only: bool,
+ bins: Vec<String>,
+ all_bins: bool,
+ tsts: Vec<String>,
+ all_tsts: bool,
+ exms: Vec<String>,
+ all_exms: bool,
+ bens: Vec<String>,
+ all_bens: bool,
+ all_targets: bool,
+ ) -> CompileFilter {
+ let rule_lib = if lib_only {
+ LibRule::True
+ } else {
+ LibRule::False
+ };
+ let rule_bins = FilterRule::new(bins, all_bins);
+ let rule_tsts = FilterRule::new(tsts, all_tsts);
+ let rule_exms = FilterRule::new(exms, all_exms);
+ let rule_bens = FilterRule::new(bens, all_bens);
+
+ if all_targets {
+ CompileFilter::Only {
+ all_targets: true,
+ lib: LibRule::Default,
+ bins: FilterRule::All,
+ examples: FilterRule::All,
+ benches: FilterRule::All,
+ tests: FilterRule::All,
+ }
+ } else {
+ CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens)
+ }
+ }
+
+ /// Construct a CompileFilter from underlying primitives.
+ pub fn new(
+ rule_lib: LibRule,
+ rule_bins: FilterRule,
+ rule_tsts: FilterRule,
+ rule_exms: FilterRule,
+ rule_bens: FilterRule,
+ ) -> CompileFilter {
+ if rule_lib == LibRule::True
+ || rule_bins.is_specific()
+ || rule_tsts.is_specific()
+ || rule_exms.is_specific()
+ || rule_bens.is_specific()
+ {
             CompileFilter::Only {
-                lib: lib_only, bins: bins, examples: examples, benches: benches,
-                tests: tests,
+ all_targets: false,
+ lib: rule_lib,
+ bins: rule_bins,
+ examples: rule_exms,
+ benches: rule_bens,
+ tests: rule_tsts,
+ }
         } else {
-            CompileFilter::Everything
+ CompileFilter::Default {
+ required_features_filterable: true,
+ }
         }
     }
+
+ pub fn need_dev_deps(&self, mode: CompileMode) -> bool {
+ match mode {
+ CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true,
+ CompileMode::Build | CompileMode::Doc { .. } | CompileMode::Check { .. } => match *self
+ {
+ CompileFilter::Default { .. } => false,
+ CompileFilter::Only {
+ ref examples,
+ ref tests,
+ ref benches,
+ ..
+ } => examples.is_specific() || tests.is_specific() || benches.is_specific(),
+ },
+ CompileMode::RunCustomBuild => panic!("Invalid mode"),
+ }
+ }

-    pub fn matches(&self, target: &Target) -> bool {

+ // this selects targets for "cargo run".
For logic to select targets for
+ // other subcommands, see generate_targets and filter_default_targets
+ pub fn target_run(&self, target: &Target) -> bool {
         match *self {
-            CompileFilter::Everything => true,
+ CompileFilter::Default { .. } => true,
+ CompileFilter::Only {
+ ref lib,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ ..
+ } => {
-            CompileFilter::Only { lib, bins, examples, tests, benches } => {
-                let list = match *target.kind() {
+ let rule = match *target.kind() {
                     TargetKind::Bin => bins,
                     TargetKind::Test => tests,
                     TargetKind::Bench => benches,
-                    TargetKind::Example => examples,
-                    TargetKind::Lib(..) => return lib,
+ TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples,
+ TargetKind::Lib(..) => {
+ return match *lib {
+ LibRule::True => true,
+ LibRule::Default => true,
+ LibRule::False => false,
+ };
+ }
                     TargetKind::CustomBuild => return false,
                 };
-                list.iter().any(|x| *x == target.name())
+ rule.matches(target)
             }
         }
     }
+
+ pub fn is_specific(&self) -> bool {
+ match *self {
+ CompileFilter::Default { .. } => false,
+ CompileFilter::Only { .. } => true,
+ }
+ }
 }

-/// Given the configuration for a build, this function will generate all
-/// target/profile combinations needed to be built.
-fn generate_targets<'a>(pkg: &'a Package,
-                        mode: CompileMode,
-                        filter: &CompileFilter,
-                        release: bool)
-                        -> CargoResult<Vec<(&'a Target, &'a Profile)>> {
-    let profiles = pkg.manifest().profiles();
-    let build = if release {&profiles.release} else {&profiles.dev};
-    let test = if release {&profiles.bench} else {&profiles.test};
-    let profile = match mode {
-        CompileMode::Test => test,
-        CompileMode::Bench => &profiles.bench,
-        CompileMode::Build => build,
-        CompileMode::Doc { .. } => &profiles.doc,
-    };

+/// A proposed target.
+///
+/// Proposed targets are later filtered into actual `Unit`s based on whether or
+/// not the target requires its features to be present.
+#[derive(Debug)]
+struct Proposal<'a> {
+ pkg: &'a Package,
+ target: &'a Target,
+ /// Indicates whether or not all required features *must* be present. If
+ /// false, and the features are not available, then it will be silently
+ /// skipped. Generally, targets specified by name (`--bin foo`) are
+ /// required, all others can be silently skipped if features are missing.
+ requires_features: bool,
+ mode: CompileMode,
+}
+
+/// Generates all the base targets for the packages the user has requested to
+/// compile. Dependencies for these targets are computed later in `unit_dependencies`.
+fn generate_targets<'a>(
+ ws: &Workspace<'_>,
+ profiles: &Profiles,
+ packages: &[&'a Package],
+ filter: &CompileFilter,
+ default_arch_kind: Kind,
+ resolve: &Resolve,
+ bcx: &BuildContext<'a, '_>,
+) -> CargoResult<Vec<Unit<'a>>> {
+ // Helper for creating a `Unit` struct.
+ let new_unit = |pkg: &'a Package, target: &'a Target, target_mode: CompileMode| {
+ let unit_for = if bcx.build_config.mode.is_any_test() {
+ // NOTE: the `UnitFor` here is subtle. If you have a profile
+ // with `panic` set, the `panic` flag is cleared for
+ // tests/benchmarks and their dependencies. If this
+ // was `normal`, then the lib would get compiled three
+ // times (once with panic, once without, and once with
+ // `--test`).
+ //
+ // This would cause a problem for doc tests, which would fail
+ // because `rustdoc` would attempt to link with both libraries
+ // at the same time.
+ Also, it's probably not important (or
+ // even desirable?) for rustdoc to link with a lib with
+ // `panic` set.
+ //
+ // As a consequence, Examples and Binaries get compiled
+ // without `panic` set. This probably isn't a bad deal.
+ //
+ // Forcing the lib to be compiled three times during `cargo
+ // test` is probably also not desirable.
+ UnitFor::new_test()
+ } else if target.for_host() {
+ // Proc macro / plugin should not have `panic` set.
+ UnitFor::new_compiler()
+ } else {
+ UnitFor::new_normal()
+ };
+ // Custom build units are added in `build_unit_dependencies`.
+ assert!(!target.is_custom_build());
+ let target_mode = match target_mode {
+ CompileMode::Test => {
+ if target.is_example() && !filter.is_specific() && !target.tested() {
+ // Examples are included as regular binaries to verify
+ // that they compile.
+ CompileMode::Build
+ } else {
+ CompileMode::Test
+ }
+ }
+ CompileMode::Build => match *target.kind() {
+ TargetKind::Test => CompileMode::Test,
+ TargetKind::Bench => CompileMode::Bench,
+ _ => CompileMode::Build,
+ },
+ // `CompileMode::Bench` is only used to inform `filter_default_targets`
+ // which command is being used (`cargo bench`). Afterwards, tests
+ // and benches are treated identically. Switching the mode allows
+ // de-duplication of units that are essentially identical. For
+ // example, `cargo build --all-targets --release` creates the units
+ // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench)
+ // and since these are the same, we want them to be de-duplicated in
+ // `unit_dependencies`.
+ CompileMode::Bench => CompileMode::Test,
+ _ => target_mode,
+ };
+ // Plugins or proc macros should be built for the host.
+ let kind = if target.for_host() {
+ Kind::Host
+ } else {
+ default_arch_kind
+ };
+ let profile = profiles.get_profile(
+ pkg.package_id(),
+ ws.is_member(pkg),
+ unit_for,
+ target_mode,
+ bcx.build_config.release,
+ );
+ bcx.units.intern(pkg, target, profile, kind, target_mode)
+ };

-    return match *filter {
-        CompileFilter::Everything => {
-            match mode {
-                CompileMode::Bench => {
-                    Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| {
-                        (t, profile)
-                    }).collect::<Vec<_>>())
-                }
-                CompileMode::Test => {
-                    let mut base = pkg.targets().iter().filter(|t| {
-                        t.tested()
-                    }).map(|t| {
-                        (t, if t.is_example() {build} else {profile})
-                    }).collect::<Vec<_>>();
-
-                    // Always compile the library if we're testing everything as
-                    // it'll be needed for doctests
-                    if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
-                        if t.doctested() {
-                            base.push((t, build));
-                        }
-                    }
-                    Ok(base)
-                }
-                CompileMode::Build => {
-                    Ok(pkg.targets().iter().filter(|t| {
-                        t.is_bin() || t.is_lib()
-                    }).map(|t| (t, profile)).collect())
-                }

+ // Create a list of proposed targets.
+ let mut proposals: Vec<Proposal<'a>> = Vec::new();
+
+ match *filter {
+ CompileFilter::Default {
+ required_features_filterable,
+ } => {
+ for pkg in packages {
+ let default = filter_default_targets(pkg.targets(), bcx.build_config.mode);
+ proposals.extend(default.into_iter().map(|target| Proposal {
+ pkg,
+ target,
+ requires_features: !required_features_filterable,
+ mode: bcx.build_config.mode,
+ }));
+ if bcx.build_config.mode == CompileMode::Test {
+ if let Some(t) = pkg
+ .targets()
+ .iter()
+ .find(|t| t.is_lib() && t.doctested() && t.doctestable())
+ {
+ proposals.push(Proposal {
+ pkg,
+ target: t,
+ requires_features: false,
+ mode: CompileMode::Doctest,
+ });
+ }
+ }
+ }
+ }
-                CompileMode::Doc { .. } => {
-                    Ok(pkg.targets().iter().filter(|t| t.documented())
-                         .map(|t| (t, profile)).collect())
-                }
-            }
-        }
-        CompileFilter::Only { lib, bins, examples, tests, benches } => {
-            let mut targets = Vec::new();
-
-            if lib {
-                if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
-                    targets.push((t, profile));
-                } else {
-                    return Err(human(format!("no library targets found")))
-                }
-            }

+ CompileFilter::Only {
+ all_targets,
+ ref lib,
+ ref bins,
+ ref examples,
+ ref tests,
+ ref benches,
+ } => {
+ if *lib != LibRule::False {
+ let mut libs = Vec::new();
+ for proposal in
+ filter_targets(packages, Target::is_lib, false, bcx.build_config.mode)
+ {
+ let Proposal { target, pkg, .. } = proposal;
+ if bcx.build_config.mode.is_doc_test() && !target.doctestable() {
+ ws.config().shell().warn(format!(
+ "doc tests are not supported for crate type(s) `{}` in package `{}`",
+ target.rustc_crate_types().join(", "),
+ pkg.name()
+ ))?;
+ } else {
+ libs.push(proposal)
+ }
+ }
+ if !all_targets && libs.is_empty() && *lib == LibRule::True {
+ let names = packages.iter().map(|pkg| pkg.name()).collect::<Vec<_>>();
+ if names.len() == 1 {
+ failure::bail!("no library targets found in package `{}`", names[0]);
+ } else {
+ failure::bail!(
+ "no library targets found in packages: {}",
+ names.join(", ")
+ );
+ }
+ }
+ proposals.extend(libs);
+ }

-            {
-                let mut find = |names: &[String], desc, kind, profile| {
-                    for name in names {
-                        let target = pkg.targets().iter().find(|t| {
-                            t.name() == *name && *t.kind() == kind
-                        });
-                        let t = match target {
-                            Some(t) => t,
-                            None => return Err(human(format!("no {} target \
-                                                               named `{}`",
-                                                              desc, name))),
-                        };
-                        debug!("found {} `{}`", desc, name);
-                        targets.push((t, profile));
-                    }
-                    Ok(())
-                };
-                try!(find(bins, "bin", TargetKind::Bin, profile));
-                try!(find(examples, "example", TargetKind::Example, build));
-                try!(find(tests, "test", TargetKind::Test, test));
-                try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));

+ // If `--tests` was specified, add all targets that would be
+ // generated by `cargo test`.
+ let test_filter = match tests {
+ FilterRule::All => Target::tested,
+ FilterRule::Just(_) => Target::is_test,
+ };
+ let test_mode = match bcx.build_config.mode {
+ CompileMode::Build => CompileMode::Test,
+ CompileMode::Check { .. } => CompileMode::Check { test: true },
+ _ => bcx.build_config.mode,
+ };
+ // If `--benches` was specified, add all targets that would be
+ // generated by `cargo bench`.
+ let bench_filter = match benches {
+ FilterRule::All => Target::benched,
+ FilterRule::Just(_) => Target::is_bench,
+ };
+ let bench_mode = match bcx.build_config.mode {
+ CompileMode::Build => CompileMode::Bench,
+ CompileMode::Check { .. } => CompileMode::Check { test: true },
+ _ => bcx.build_config.mode,
+ };
+
+ proposals.extend(list_rule_targets(
+ packages,
+ bins,
+ "bin",
+ Target::is_bin,
+ bcx.build_config.mode,
+ )?);
+ proposals.extend(list_rule_targets(
+ packages,
+ examples,
+ "example",
+ Target::is_example,
+ bcx.build_config.mode,
+ )?);
+ proposals.extend(list_rule_targets(
+ packages,
+ tests,
+ "test",
+ test_filter,
+ test_mode,
+ )?);
+ proposals.extend(list_rule_targets(
+ packages,
+ benches,
+ "bench",
+ bench_filter,
+ bench_mode,
+ )?);
+ }

-            Ok(targets)
-        }
-    };
+ }
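The filtering step that follows keeps a proposal only when it is a library or when every entry in its `required-features` list is present in the resolved feature set. The core predicate, reduced to a sketch (names are stand-ins for Cargo's types):

```rust
use std::collections::HashSet;

fn is_buildable(is_lib: bool, required: Option<&[String]>, enabled: &HashSet<String>) -> bool {
    is_lib
        || match required {
            None => true, // no required-features: always buildable
            Some(rf) => rf.iter().all(|f| enabled.contains(f)),
        }
}
```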
+ let mut features_map = HashMap::new(); + let mut units = HashSet::new(); + for Proposal { + pkg, + target, + requires_features, + mode, + } in proposals + { + let unavailable_features = match target.required_features() { + Some(rf) => { + let features = features_map + .entry(pkg) + .or_insert_with(|| resolve_all_features(resolve, pkg.package_id())); + rf.iter().filter(|f| !features.contains_key(*f)).collect() } - Ok(targets) + None => Vec::new(), + }; + if target.is_lib() || unavailable_features.is_empty() { + let unit = new_unit(pkg, target, mode); + units.insert(unit); + } else if requires_features { + let required_features = target.required_features().unwrap(); + let quoted_required_features: Vec = required_features + .iter() + .map(|s| format!("`{}`", s)) + .collect(); + failure::bail!( + "target `{}` in package `{}` requires the features: {}\n\ + Consider enabling them by passing, e.g., `--features=\"{}\"`", + target.name(), + pkg.name(), + quoted_required_features.join(", "), + required_features.join(" ") + ); } - }; + // else, silently skip target. + } + Ok(units.into_iter().collect()) } -/// Read the `paths` configuration variable to discover all path overrides that -/// have been configured. -fn source_ids_from_config(config: &Config, cur_path: &Path) - -> CargoResult> { +fn resolve_all_features( + resolve_with_overrides: &Resolve, + package_id: PackageId, +) -> HashMap> { + let mut features = resolve_with_overrides.features(package_id).clone(); + + // Include features enabled for use by dependencies so targets can also use them with the + // required-features field when deciding whether to be built or skipped. + for (dep, _) in resolve_with_overrides.deps(package_id) { + for feature in resolve_with_overrides.features(dep) { + features.insert(dep.name().to_string() + "/" + feature.0, feature.1.clone()); + } + } - let configs = try!(config.values()); - debug!("loaded config; configs={:?}", configs); - let config_paths = match configs.get("paths") { - Some(cfg) => cfg, - None => return Ok(Vec::new()) - }; - let paths = try!(config_paths.list().chain_error(|| { - internal("invalid configuration for the key `paths`") - })); - - paths.iter().map(|&(ref s, ref p)| { - // The path listed next to the string is the config file in which the - // key was located, so we want to pop off the `.cargo/config` component - // to get the directory containing the `.cargo` folder. - p.parent().unwrap().parent().unwrap().join(s) - }).filter(|p| { - // Make sure we don't override the local package, even if it's in the - // list of override paths. - cur_path != &**p - }).map(|p| SourceId::for_path(&p)).collect() + features } -/// Parse all config files to learn about build configuration. Currently -/// configured options are: -/// -/// * build.jobs -/// * target.$target.ar -/// * target.$target.linker -/// * target.$target.libfoo.metadata -fn scrape_build_config(config: &Config, - jobs: Option, - target: Option) - -> CargoResult { - let cfg_jobs = match try!(config.get_i64("build.jobs")) { - Some((n, p)) => { - if n <= 0 { - return Err(human(format!("build.jobs must be positive, \ - but found {} in {:?}", n, p))); - } else if n >= u32::max_value() as i64 { - return Err(human(format!("build.jobs is too large: \ - found {} in {:?}", n, p))); - } else { - Some(n as u32) - } +/// Given a list of all targets for a package, filters out only the targets +/// that are automatically included when the user doesn't specify any targets. 
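+/// For example, `cargo build` keeps bins and the lib, `cargo test` keeps all
+/// `test = true` targets plus examples (examples are built, not run, so they
+/// are verified to still compile), `cargo bench` keeps `bench = true`
+/// targets, and `cargo doc` keeps documented targets, skipping a bin whose
+/// name collides with the lib.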
+fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> { + match mode { + CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(), + CompileMode::Test => targets + .iter() + .filter(|t| t.tested() || t.is_example()) + .collect(), + CompileMode::Build | CompileMode::Check { .. } => targets + .iter() + .filter(|t| t.is_bin() || t.is_lib()) + .collect(), + CompileMode::Doc { .. } => { + // `doc` does lib and bins (bin with same name as lib is skipped). + targets + .iter() + .filter(|t| { + t.documented() + && (!t.is_bin() + || !targets.iter().any(|l| l.is_lib() && l.name() == t.name())) + }) + .collect() } - None => None, - }; - let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32); - let mut base = ops::BuildConfig { - jobs: jobs, - requested_target: target.clone(), - ..Default::default() - }; - base.host = try!(scrape_target_config(config, &config.rustc_info().host)); - base.target = match target.as_ref() { - Some(triple) => try!(scrape_target_config(config, &triple)), - None => base.host.clone(), - }; - Ok(base) + CompileMode::Doctest | CompileMode::RunCustomBuild => panic!("Invalid mode {:?}", mode), + } } -fn scrape_target_config(config: &Config, triple: &str) - -> CargoResult { - - let key = format!("target.{}", triple); - let mut ret = ops::TargetConfig { - ar: try!(config.get_path(&format!("{}.ar", key))), - linker: try!(config.get_path(&format!("{}.linker", key))), - overrides: HashMap::new(), - }; - let table = match try!(config.get_table(&key)) { - Some((table, _)) => table, - None => return Ok(ret), - }; - for (lib_name, _) in table.into_iter() { - if lib_name == "ar" || lib_name == "linker" { continue } - - let mut output = BuildOutput { - library_paths: Vec::new(), - library_links: Vec::new(), - cfgs: Vec::new(), - metadata: Vec::new(), - }; - let key = format!("{}.{}", key, lib_name); - let table = try!(config.get_table(&key)).unwrap().0; - for (k, _) in table.into_iter() { - let key = format!("{}.{}", key, k); - match try!(config.get(&key)).unwrap() { - ConfigValue::String(v, path) => { - if k == "rustc-flags" { - let whence = format!("in `{}` (in {})", key, - path.display()); - let (paths, links) = try!( - BuildOutput::parse_rustc_flags(&v, &whence) - ); - output.library_paths.extend(paths.into_iter()); - output.library_links.extend(links.into_iter()); - } else { - output.metadata.push((k, v)); - } - }, - ConfigValue::List(a, p) => { - if k == "rustc-link-lib" { - output.library_links.extend(a.into_iter().map(|v| v.0)); - } else if k == "rustc-link-search" { - output.library_paths.extend(a.into_iter().map(|v| { - PathBuf::from(&v.0) - })); - } else if k == "rustc-cfg" { - output.cfgs.extend(a.into_iter().map(|v| v.0)); - } else { - try!(config.expected("string", &k, - ConfigValue::List(a, p))); - } - }, - // technically could be a list too, but that's the exception to - // the rule... - cv => { try!(config.expected("string", &k, cv)); } +/// Returns a list of proposed targets based on command-line target selection flags. 
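+/// `FilterRule::All` (a bare flag such as `--tests`) proposes every target of
+/// the expected kind, while `FilterRule::Just(names)` (e.g., `--test foo`)
+/// looks each name up and fails with a closest-match suggestion when nothing
+/// matches.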
+fn list_rule_targets<'a>( + packages: &[&'a Package], + rule: &FilterRule, + target_desc: &'static str, + is_expected_kind: fn(&Target) -> bool, + mode: CompileMode, +) -> CargoResult>> { + let mut proposals = Vec::new(); + match rule { + FilterRule::All => { + proposals.extend(filter_targets(packages, is_expected_kind, false, mode)) + } + FilterRule::Just(names) => { + for name in names { + proposals.extend(find_named_targets( + packages, + name, + target_desc, + is_expected_kind, + mode, + )?); } } - ret.overrides.insert(lib_name, output); } + Ok(proposals) +} - Ok(ret) +/// Finds the targets for a specifically named target. +fn find_named_targets<'a>( + packages: &[&'a Package], + target_name: &str, + target_desc: &'static str, + is_expected_kind: fn(&Target) -> bool, + mode: CompileMode, +) -> CargoResult>> { + let filter = |t: &Target| t.name() == target_name && is_expected_kind(t); + let proposals = filter_targets(packages, filter, true, mode); + if proposals.is_empty() { + let targets = packages.iter().flat_map(|pkg| { + pkg.targets() + .iter() + .filter(|target| is_expected_kind(target)) + }); + let suggestion = closest_msg(target_name, targets, |t| t.name()); + failure::bail!( + "no {} target named `{}`{}", + target_desc, + target_name, + suggestion + ); + } + Ok(proposals) +} + +fn filter_targets<'a>( + packages: &[&'a Package], + predicate: impl Fn(&Target) -> bool, + requires_features: bool, + mode: CompileMode, +) -> Vec> { + let mut proposals = Vec::new(); + for pkg in packages { + for target in pkg.targets().iter().filter(|t| predicate(t)) { + proposals.push(Proposal { + pkg, + target, + requires_features, + mode, + }); + } + } + proposals } diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index 8090a35fe18..fb9aa7ec49b 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -1,102 +1,100 @@ -use std::collections::HashSet; +use std::collections::HashMap; use std::fs; use std::path::Path; -use std::process::Command; -use core::PackageIdSpec; -use core::source::Source; -use ops; -use sources::PathSource; -use util::{CargoResult, human}; +use failure::Fail; +use opener; +use crate::core::resolver::ResolveOpts; +use crate::core::Workspace; +use crate::ops; +use crate::util::CargoResult; + +/// Strongly typed options for the `cargo doc` command. +#[derive(Debug)] pub struct DocOptions<'a> { + /// Whether to attempt to open the browser after compiling the docs pub open_result: bool, + /// Options to pass through to the compiler pub compile_opts: ops::CompileOptions<'a>, } -pub fn doc(manifest_path: &Path, - options: &DocOptions) -> CargoResult<()> { - let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), - options.compile_opts.config)); - try!(source.update()); - let package = try!(source.root_package()); +/// Main method for `cargo doc`. 
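+/// Resolves the workspace, rejects crate-name collisions between documented
+/// targets, delegates compilation to `ops::compile`, and, when `--open` was
+/// passed, opens the generated `index.html` in the browser.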
+pub fn doc(ws: &Workspace<'_>, options: &DocOptions<'_>) -> CargoResult<()> { + let specs = options.compile_opts.spec.to_package_id_specs(ws)?; + let opts = ResolveOpts::new( + /*dev_deps*/ true, + &options.compile_opts.features, + options.compile_opts.all_features, + !options.compile_opts.no_default_features, + ); + let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?; + let (packages, resolve_with_overrides) = resolve; + + let ids = specs + .iter() + .map(|s| s.query(resolve_with_overrides.iter())) + .collect::>>()?; + let pkgs = packages.get_many(ids)?; - let mut lib_names = HashSet::new(); - let mut bin_names = HashSet::new(); - if options.compile_opts.spec.is_none() { + let mut lib_names = HashMap::new(); + let mut bin_names = HashMap::new(); + let mut names = Vec::new(); + for package in &pkgs { for target in package.targets().iter().filter(|t| t.documented()) { if target.is_lib() { - assert!(lib_names.insert(target.crate_name())); - } else { - assert!(bin_names.insert(target.crate_name())); - } - } - for bin in bin_names.iter() { - if lib_names.contains(bin) { - return Err(human("Cannot document a package where a library \ - and a binary have the same name. Consider \ - renaming one or marking the target as \ - `doc = false`")) + if let Some(prev) = lib_names.insert(target.crate_name(), package) { + failure::bail!( + "The library `{}` is specified by packages `{}` and \ + `{}` but can only be documented once. Consider renaming \ + or marking one of the targets as `doc = false`.", + target.crate_name(), + prev, + package + ); + } + } else if let Some(prev) = bin_names.insert(target.crate_name(), package) { + failure::bail!( + "The binary `{}` is specified by packages `{}` and \ + `{}` but can be documented only once. Consider renaming \ + or marking one of the targets as `doc = false`.", + target.crate_name(), + prev, + package + ); } + names.push(target.crate_name()); } } - try!(ops::compile(manifest_path, &options.compile_opts)); + ops::compile(ws, &options.compile_opts)?; if options.open_result { - let name = match options.compile_opts.spec { - Some(spec) => try!(PackageIdSpec::parse(spec)).name().replace("-", "_").to_string(), - None => { - match lib_names.iter().chain(bin_names.iter()).nth(0) { - Some(s) => s.to_string(), - None => return Ok(()) - } - } + let name = match names.first() { + Some(s) => s.to_string(), + None => return Ok(()), }; - let target_dir = options.compile_opts.config.target_dir(&package); + // Don't bother locking here as if this is getting deleted there's + // nothing we can do about it and otherwise if it's getting overwritten + // then that's also ok! 
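+    // As a rough standalone sketch of the path computed below (hypothetical
+    // helper; the real code goes through `Filesystem` and the lock-free
+    // `into_path_unlocked`): docs land at
+    // `<target-dir>[/<triple-stem>]/doc/<crate>/index.html`, where the
+    // triple component only appears when a `--target` was requested.
+    fn doc_index_path(target_dir: &Path, triple: Option<&str>, krate: &str) -> std::path::PathBuf {
+        let mut dir = target_dir.to_path_buf();
+        if let Some(t) = triple {
+            // Mirrors the `file_stem` below, which strips an extension
+            // (e.g., a `.json` target-spec suffix).
+            dir.push(Path::new(t).file_stem().unwrap());
+        }
+        dir.join("doc").join(krate).join("index.html")
+    }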
+ let mut target_dir = ws.target_dir(); + if let Some(ref triple) = options.compile_opts.build_config.requested_target { + target_dir.push(Path::new(triple).file_stem().unwrap()); + } let path = target_dir.join("doc").join(&name).join("index.html"); + let path = path.into_path_unlocked(); if fs::metadata(&path).is_ok() { - open_docs(&path); + let mut shell = options.compile_opts.config.shell(); + shell.status("Opening", path.display())?; + if let Err(e) = opener::open(&path) { + shell.warn(format!("Couldn't open docs: {}", e))?; + for cause in (&e as &dyn Fail).iter_chain() { + shell.warn(format!("Caused by:\n {}", cause))?; + } + } } } Ok(()) } - -#[cfg(not(any(target_os = "windows", target_os = "macos")))] -fn open_docs(path: &Path) { - // trying xdg-open - match Command::new("xdg-open").arg(path).status() { - Ok(_) => return, - Err(_) => () - }; - - // trying gnome-open - match Command::new("gnome-open").arg(path).status() { - Ok(_) => return, - Err(_) => () - }; - - // trying kde-open - match Command::new("kde-open").arg(path).status() { - Ok(_) => return, - Err(_) => () - }; -} - -#[cfg(target_os = "windows")] -fn open_docs(path: &Path) { - match Command::new("cmd").arg("/C").arg("start").arg("").arg(path).status() { - Ok(_) => return, - Err(_) => () - }; -} - -#[cfg(target_os = "macos")] -fn open_docs(path: &Path) { - match Command::new("open").arg(path).status() { - Ok(_) => return, - Err(_) => () - }; -} diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index cbd3382f90d..37ccc07c438 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -1,25 +1,65 @@ -use std::path::Path; +use crate::core::compiler::{BuildConfig, CompileMode, Kind, TargetInfo}; +use crate::core::{PackageSet, Resolve, Workspace}; +use crate::ops; +use crate::util::CargoResult; +use crate::util::Config; +use std::collections::HashSet; -use core::registry::PackageRegistry; -use core::{Source, PackageId}; -use ops; -use sources::PathSource; -use util::{CargoResult, Config, human, ChainError}; +pub struct FetchOptions<'a> { + pub config: &'a Config, + /// The target arch triple to fetch dependencies for + pub target: Option, +} /// Executes `cargo fetch`. -pub fn fetch(manifest_path: &Path, config: &Config) -> CargoResult<()> { - let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), - config)); - try!(source.update()); - let package = try!(source.root_package()); +pub fn fetch<'a>( + ws: &Workspace<'a>, + options: &FetchOptions<'a>, +) -> CargoResult<(Resolve, PackageSet<'a>)> { + let (packages, resolve) = ops::resolve_ws(ws)?; + + let jobs = Some(1); + let config = ws.config(); + let build_config = BuildConfig::new(config, jobs, &options.target, CompileMode::Build)?; + let rustc = config.load_global_rustc(Some(ws))?; + let target_info = + TargetInfo::new(config, &build_config.requested_target, &rustc, Kind::Target)?; + { + let mut fetched_packages = HashSet::new(); + let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::>(); + let mut to_download = Vec::new(); + + while let Some(id) = deps_to_fetch.pop() { + if !fetched_packages.insert(id) { + continue; + } - let mut registry = PackageRegistry::new(config); - registry.preload(package.package_id().source_id(), Box::new(source)); - let resolve = try!(ops::resolve_pkg(&mut registry, &package)); + to_download.push(id); + let deps = resolve + .deps(id) + .filter(|&(_id, deps)| { + deps.iter().any(|d| { + // If no target was specified then all dependencies can + // be fetched. 
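+                    // A simplified sketch of this filter (plain strings
+                    // instead of Cargo's platform types): fetch unless a
+                    // target was requested *and* the dependency is pinned to
+                    // platforms that don't match it. The real check below
+                    // evaluates `cfg(...)` expressions rather than comparing
+                    // strings.
+                    fn should_fetch(requested: Option<&str>, dep_platform: Option<&str>) -> bool {
+                        match (requested, dep_platform) {
+                            (Some(t), Some(p)) => p == t,
+                            _ => true,
+                        }
+                    }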
+ let target = match options.target { + Some(ref t) => t, + None => return true, + }; + // If this dependency is only available for certain + // platforms, make sure we're only fetching it for that + // platform. + let platform = match d.platform() { + Some(p) => p, + None => return true, + }; + platform.matches(target, target_info.cfg()) + }) + }) + .map(|(id, _deps)| id); + deps_to_fetch.extend(deps); + } + packages.get_many(to_download)?; + } - let ids: Vec = resolve.iter().cloned().collect(); - try!(registry.get(&ids).chain_error(|| { - human("unable to get packages from source") - })); - Ok(()) + Ok((resolve, packages)) } diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index f561664b876..45c0aa68874 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -1,180 +1,242 @@ -use std::collections::{HashMap, HashSet}; -use std::path::Path; - -use core::PackageId; -use core::registry::PackageRegistry; -use core::{Source, Resolve, SourceId}; -use core::resolver::Method; -use ops; -use sources::{PathSource}; -use util::config::{Config}; -use util::{CargoResult, human}; +use std::collections::{BTreeMap, HashSet}; + +use log::debug; +use termcolor::Color::{self, Cyan, Green, Red}; + +use crate::core::registry::PackageRegistry; +use crate::core::resolver::ResolveOpts; +use crate::core::PackageId; +use crate::core::{Resolve, SourceId, Workspace}; +use crate::ops; +use crate::util::config::Config; +use crate::util::CargoResult; pub struct UpdateOptions<'a> { pub config: &'a Config, - pub to_update: Option<&'a str>, + pub to_update: Vec, pub precise: Option<&'a str>, pub aggressive: bool, + pub dry_run: bool, } -pub fn generate_lockfile(manifest_path: &Path, config: &Config) - -> CargoResult<()> { - let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), - config)); - try!(source.update()); - let package = try!(source.root_package()); - let mut registry = PackageRegistry::new(config); - registry.preload(package.package_id().source_id(), Box::new(source)); - let resolve = try!(ops::resolve_with_previous(&mut registry, &package, - Method::Everything, - None, None)); - try!(ops::write_pkg_lockfile(&package, &resolve)); +pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = ops::resolve_with_previous( + &mut registry, + ws, + ResolveOpts::everything(), + None, + None, + &[], + true, + )?; + ops::write_pkg_lockfile(ws, &resolve)?; Ok(()) } -pub fn update_lockfile(manifest_path: &Path, - opts: &UpdateOptions) -> CargoResult<()> { - let mut source = try!(PathSource::for_path(manifest_path.parent().unwrap(), - opts.config)); - try!(source.update()); - let package = try!(source.root_package()); +pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoResult<()> { + if opts.aggressive && opts.precise.is_some() { + failure::bail!("cannot specify both aggressive and precise simultaneously") + } - let previous_resolve = match try!(ops::load_pkg_lockfile(&package)) { - Some(resolve) => resolve, - None => return Err(human("A Cargo.lock must exist before it is updated")) - }; + if ws.members().count() == 0 { + failure::bail!("you can't generate a lockfile for an empty workspace.") + } - if opts.aggressive && opts.precise.is_some() { - return Err(human("cannot specify both aggressive and precise \ - simultaneously")) + if opts.config.offline() { + failure::bail!("you can't update in the offline 
mode"); } - let mut registry = PackageRegistry::new(opts.config); + // Updates often require a lot of modifications to the registry, so ensure + // that we're synchronized against other Cargos. + let _lock = ws.config().acquire_package_cache_lock()?; + + let previous_resolve = match ops::load_pkg_lockfile(ws)? { + Some(resolve) => resolve, + None => { + match opts.precise { + None => return generate_lockfile(ws), + + // Precise option specified, so calculate a previous_resolve required + // by precise package update later. + Some(_) => { + let mut registry = PackageRegistry::new(opts.config)?; + ops::resolve_with_previous( + &mut registry, + ws, + ResolveOpts::everything(), + None, + None, + &[], + true, + )? + } + } + } + }; + let mut registry = PackageRegistry::new(opts.config)?; let mut to_avoid = HashSet::new(); - match opts.to_update { - Some(name) => { - let dep = try!(previous_resolve.query(name)); + if opts.to_update.is_empty() { + to_avoid.extend(previous_resolve.iter()); + } else { + let mut sources = Vec::new(); + for name in opts.to_update.iter() { + let dep = previous_resolve.query(name)?; if opts.aggressive { - fill_with_deps(&previous_resolve, dep, &mut to_avoid, - &mut HashSet::new()); + fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new()); } else { to_avoid.insert(dep); - match opts.precise { + sources.push(match opts.precise { Some(precise) => { // TODO: see comment in `resolve.rs` as well, but this // seems like a pretty hokey reason to single out // the registry as well. let precise = if dep.source_id().is_registry() { - format!("{}={}", dep.name(), precise) + format!("{}={}->{}", dep.name(), dep.version(), precise) } else { precise.to_string() }; - let precise = dep.source_id().clone() - .with_precise(Some(precise)); - try!(registry.add_sources(&[precise])); + dep.source_id().with_precise(Some(precise)) } - None => { - let imprecise = dep.source_id().clone() - .with_precise(None); - try!(registry.add_sources(&[imprecise])); - } - } + None => dep.source_id().with_precise(None), + }); } } - None => to_avoid.extend(previous_resolve.iter()), + + registry.add_sources(sources)?; } - registry.preload(package.package_id().source_id(), Box::new(source)); - let resolve = try!(ops::resolve_with_previous(&mut registry, - &package, - Method::Everything, - Some(&previous_resolve), - Some(&to_avoid))); + let resolve = ops::resolve_with_previous( + &mut registry, + ws, + ResolveOpts::everything(), + Some(&previous_resolve), + Some(&to_avoid), + &[], + true, + )?; // Summarize what is changing for the user. 
- let print_change = |status: &str, msg: String| { - opts.config.shell().status(status, msg) + let print_change = |status: &str, msg: String, color: Color| { + opts.config.shell().status_with_color(status, msg, color) }; for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) { if removed.len() == 1 && added.len() == 1 { - if removed[0].source_id().is_git() { - try!(print_change("Updating", format!("{} -> #{}", + let msg = if removed[0].source_id().is_git() { + format!( + "{} -> #{}", removed[0], - &added[0].source_id().precise().unwrap()[..8]))); + &added[0].source_id().precise().unwrap()[..8] + ) } else { - try!(print_change("Updating", format!("{} -> v{}", - removed[0], - added[0].version()))); - } - } - else { + format!("{} -> v{}", removed[0], added[0].version()) + }; + print_change("Updating", msg, Green)?; + } else { for package in removed.iter() { - try!(print_change("Removing", format!("{}", package))); + print_change("Removing", format!("{}", package), Red)?; } for package in added.iter() { - try!(print_change("Adding", format!("{}", package))); + print_change("Adding", format!("{}", package), Cyan)?; } } } - - try!(ops::write_pkg_lockfile(&package, &resolve)); + if opts.dry_run { + opts.config + .shell() + .warn("not updating lockfile due to dry run")?; + } else { + ops::write_pkg_lockfile(ws, &resolve)?; + } return Ok(()); - fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId, - set: &mut HashSet<&'a PackageId>, - visited: &mut HashSet<&'a PackageId>) { - if !visited.insert(dep) { return } + fn fill_with_deps<'a>( + resolve: &'a Resolve, + dep: PackageId, + set: &mut HashSet, + visited: &mut HashSet, + ) { + if !visited.insert(dep) { + return; + } set.insert(dep); - match resolve.deps(dep) { - Some(deps) => { - for dep in deps { - fill_with_deps(resolve, dep, set, visited); - } - } - None => {} + for (dep, _) in resolve.deps_not_replaced(dep) { + fill_with_deps(resolve, dep, set, visited); } } - fn compare_dependency_graphs<'a>(previous_resolve: &'a Resolve, - resolve: &'a Resolve) -> - Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> { - // Map (package name, package source) to (removed versions, added versions). - fn changes_key<'a>(dep: &'a PackageId) -> (&'a str, &'a SourceId) { - (dep.name(), dep.source_id()) + fn compare_dependency_graphs( + previous_resolve: &Resolve, + resolve: &Resolve, + ) -> Vec<(Vec, Vec)> { + fn key(dep: PackageId) -> (&'static str, SourceId) { + (dep.name().as_str(), dep.source_id()) } - fn vec_subtract(a: &[T], b: &[T]) -> Vec - where T: Ord + Clone { - let mut result = a.to_owned(); - let mut b = b.to_owned(); - b.sort(); - result.retain(|x| b.binary_search(x).is_err()); - result + // Removes all package IDs in `b` from `a`. Note that this is somewhat + // more complicated because the equality for source IDs does not take + // precise versions into account (e.g., git shas), but we want to take + // that into account here. + fn vec_subtract(a: &[PackageId], b: &[PackageId]) -> Vec { + a.iter() + .filter(|a| { + // If this package ID is not found in `b`, then it's definitely + // in the subtracted set. + let i = match b.binary_search(a) { + Ok(i) => i, + Err(..) => return true, + }; + + // If we've found `a` in `b`, then we iterate over all instances + // (we know `b` is sorted) and see if they all have different + // precise versions. If so, then `a` isn't actually in `b` so + // we'll let it through. 
+ // + // Note that we only check this for non-registry sources, + // however, as registries contain enough version information in + // the package ID to disambiguate. + if a.source_id().is_registry() { + return false; + } + b[i..] + .iter() + .take_while(|b| a == b) + .all(|b| a.source_id().precise() != b.source_id().precise()) + }) + .cloned() + .collect() } - let mut changes = HashMap::new(); - + // Map `(package name, package source)` to `(removed versions, added versions)`. + let mut changes = BTreeMap::new(); + let empty = (Vec::new(), Vec::new()); for dep in previous_resolve.iter() { - changes.insert(changes_key(dep), (vec![dep], vec![])); + changes + .entry(key(dep)) + .or_insert_with(|| empty.clone()) + .0 + .push(dep); } for dep in resolve.iter() { - let (_, ref mut added) = *changes.entry(changes_key(dep)) - .or_insert_with(|| (vec![], vec![])); - added.push(dep); + changes + .entry(key(dep)) + .or_insert_with(|| empty.clone()) + .1 + .push(dep); } - for (_, v) in changes.iter_mut() { + for v in changes.values_mut() { let (ref mut old, ref mut new) = *v; + old.sort(); + new.sort(); let removed = vec_subtract(old, new); let added = vec_subtract(new, old); *old = removed; *new = added; } + debug!("{:#?}", changes); - // Sort the packages by their names. - let mut packages: Vec<_> = changes.keys().map(|x| *x).collect(); - packages.sort(); - packages.iter().map(|k| changes[k].clone()).collect() + changes.into_iter().map(|(_, v)| v).collect() } } diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs new file mode 100644 index 00000000000..04f9befe33b --- /dev/null +++ b/src/cargo/ops/cargo_install.rs @@ -0,0 +1,523 @@ +use std::collections::{BTreeMap, BTreeSet, HashSet}; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use std::{env, fs}; + +use failure::{bail, format_err}; +use tempfile::Builder as TempFileBuilder; + +use crate::core::compiler::Freshness; +use crate::core::compiler::{DefaultExecutor, Executor}; +use crate::core::resolver::ResolveOpts; +use crate::core::{Edition, PackageId, PackageIdSpec, Source, SourceId, Workspace}; +use crate::ops; +use crate::ops::common_for_install_and_uninstall::*; +use crate::sources::{GitSource, SourceConfigMap}; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::{paths, Config, Filesystem}; + +struct Transaction { + bins: Vec, +} + +impl Transaction { + fn success(mut self) { + self.bins.clear(); + } +} + +impl Drop for Transaction { + fn drop(&mut self) { + for bin in self.bins.iter() { + let _ = paths::remove_file(bin); + } + } +} + +pub fn install( + root: Option<&str>, + krates: Vec<&str>, + source_id: SourceId, + from_cwd: bool, + vers: Option<&str>, + opts: &ops::CompileOptions<'_>, + force: bool, + no_track: bool, +) -> CargoResult<()> { + let root = resolve_root(root, opts.config)?; + let map = SourceConfigMap::new(opts.config)?; + + let (installed_anything, scheduled_error) = if krates.len() <= 1 { + install_one( + &root, + &map, + krates.into_iter().next(), + source_id, + from_cwd, + vers, + opts, + force, + no_track, + true, + )?; + (true, false) + } else { + let mut succeeded = vec![]; + let mut failed = vec![]; + let mut first = true; + for krate in krates { + let root = root.clone(); + let map = map.clone(); + match install_one( + &root, + &map, + Some(krate), + source_id, + from_cwd, + vers, + opts, + force, + no_track, + first, + ) { + Ok(()) => succeeded.push(krate), + Err(e) => { + crate::handle_error(&e, &mut opts.config.shell()); + failed.push(krate) + } + } + first = 
false; + } + + let mut summary = vec![]; + if !succeeded.is_empty() { + summary.push(format!("Successfully installed {}!", succeeded.join(", "))); + } + if !failed.is_empty() { + summary.push(format!( + "Failed to install {} (see error(s) above).", + failed.join(", ") + )); + } + if !succeeded.is_empty() || !failed.is_empty() { + opts.config.shell().status("Summary", summary.join(" "))?; + } + + (!succeeded.is_empty(), !failed.is_empty()) + }; + + if installed_anything { + // Print a warning that if this directory isn't in PATH that they won't be + // able to run these commands. + let dst = root.join("bin").into_path_unlocked(); + let path = env::var_os("PATH").unwrap_or_default(); + for path in env::split_paths(&path) { + if path == dst { + return Ok(()); + } + } + + opts.config.shell().warn(&format!( + "be sure to add `{}` to your PATH to be \ + able to run the installed binaries", + dst.display() + ))?; + } + + if scheduled_error { + bail!("some crates failed to install"); + } + + Ok(()) +} + +fn install_one( + root: &Filesystem, + map: &SourceConfigMap<'_>, + krate: Option<&str>, + source_id: SourceId, + from_cwd: bool, + vers: Option<&str>, + opts: &ops::CompileOptions<'_>, + force: bool, + no_track: bool, + is_first_install: bool, +) -> CargoResult<()> { + let config = opts.config; + + let pkg = if source_id.is_git() { + select_pkg( + GitSource::new(source_id, config)?, + krate, + vers, + config, + true, + &mut |git| git.read_packages(), + )? + } else if source_id.is_path() { + let mut src = path_source(source_id, config)?; + if !src.path().is_dir() { + bail!( + "`{}` is not a directory. \ + --path must point to a directory containing a Cargo.toml file.", + src.path().display() + ) + } + if !src.path().join("Cargo.toml").exists() { + if from_cwd { + bail!( + "`{}` is not a crate root; specify a crate to \ + install from crates.io, or use --path or --git to \ + specify an alternate source", + src.path().display() + ); + } else { + bail!( + "`{}` does not contain a Cargo.toml file. \ + --path must point to a directory containing a Cargo.toml file.", + src.path().display() + ) + } + } + src.update()?; + select_pkg(src, krate, vers, config, false, &mut |path| { + path.read_packages() + })? + } else { + select_pkg( + map.load(source_id, &HashSet::new())?, + krate, + vers, + config, + is_first_install, + &mut |_| { + bail!( + "must specify a crate to install from \ + crates.io, or use --path or --git to \ + specify alternate source" + ) + }, + )? + }; + + let mut td_opt = None; + let mut needs_cleanup = false; + let overidden_target_dir = if source_id.is_path() { + None + } else if let Some(dir) = config.target_dir()? { + Some(dir) + } else if let Ok(td) = TempFileBuilder::new().prefix("cargo-install").tempdir() { + let p = td.path().to_owned(); + td_opt = Some(td); + Some(Filesystem::new(p)) + } else { + needs_cleanup = true; + Some(Filesystem::new(config.cwd().join("target-install"))) + }; + + let mut ws = match overidden_target_dir { + Some(dir) => Workspace::ephemeral(pkg, config, Some(dir), false)?, + None => { + let mut ws = Workspace::new(pkg.manifest_path(), config)?; + ws.set_require_optional_deps(false); + ws + } + }; + ws.set_ignore_lock(config.lock_update_allowed()); + let pkg = ws.current()?; + + if from_cwd { + if pkg.manifest().edition() == Edition::Edition2015 { + config.shell().warn( + "Using `cargo install` to install the binaries for the \ + package in current working directory is deprecated, \ + use `cargo install --path .` instead. 
\ + Use `cargo build` if you want to simply build the package.", + )? + } else { + bail!( + "Using `cargo install` to install the binaries for the \ + package in current working directory is no longer supported, \ + use `cargo install --path .` instead. \ + Use `cargo build` if you want to simply build the package." + ) + } + }; + + // For bare `cargo install` (no `--bin` or `--example`), check if there is + // *something* to install. Explicit `--bin` or `--example` flags will be + // checked at the start of `compile_ws`. + if !opts.filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) { + bail!("specified package `{}` has no binaries", pkg); + } + + // Preflight checks to check up front whether we'll overwrite something. + // We have to check this again afterwards, but may as well avoid building + // anything if we're gonna throw it away anyway. + let dst = root.join("bin").into_path_unlocked(); + let rustc = config.load_global_rustc(Some(&ws))?; + let target = opts + .build_config + .requested_target + .as_ref() + .unwrap_or(&rustc.host) + .clone(); + + // Helper for --no-track flag to make sure it doesn't overwrite anything. + let no_track_duplicates = || -> CargoResult>> { + let duplicates: BTreeMap> = exe_names(pkg, &opts.filter) + .into_iter() + .filter(|name| dst.join(name).exists()) + .map(|name| (name, None)) + .collect(); + if !force && !duplicates.is_empty() { + let mut msg: Vec = duplicates + .iter() + .map(|(name, _)| format!("binary `{}` already exists in destination", name)) + .collect(); + msg.push("Add --force to overwrite".to_string()); + bail!("{}", msg.join("\n")); + } + Ok(duplicates) + }; + + // WARNING: no_track does not perform locking, so there is no protection + // of concurrent installs. + if no_track { + // Check for conflicts. + no_track_duplicates()?; + } else { + let tracker = InstallTracker::load(config, root)?; + let (freshness, _duplicates) = + tracker.check_upgrade(&dst, pkg, force, opts, &target, &rustc.verbose_version)?; + if freshness == Freshness::Fresh { + let msg = format!( + "package `{}` is already installed, use --force to override", + pkg + ); + config.shell().status("Ignored", &msg)?; + return Ok(()); + } + // Unlock while building. + drop(tracker); + } + + config.shell().status("Installing", pkg)?; + + check_yanked_install(&ws)?; + + let exec: Arc = Arc::new(DefaultExecutor); + let compile = ops::compile_ws(&ws, opts, &exec).chain_err(|| { + if let Some(td) = td_opt.take() { + // preserve the temporary directory, so the user can inspect it + td.into_path(); + } + + format_err!( + "failed to compile `{}`, intermediate artifacts can be \ + found at `{}`", + pkg, + ws.target_dir().display() + ) + })?; + let mut binaries: Vec<(&str, &Path)> = compile + .binaries + .iter() + .map(|bin| { + let name = bin.file_name().unwrap(); + if let Some(s) = name.to_str() { + Ok((s, bin.as_ref())) + } else { + bail!("Binary `{:?}` name can't be serialized into string", name) + } + }) + .collect::>()?; + if binaries.is_empty() { + bail!("no binaries are available for install using the selected features"); + } + // This is primarily to make testing easier. + binaries.sort_unstable(); + + let (tracker, duplicates) = if no_track { + (None, no_track_duplicates()?) 
+ } else { + let tracker = InstallTracker::load(config, root)?; + let (_freshness, duplicates) = + tracker.check_upgrade(&dst, pkg, force, opts, &target, &rustc.verbose_version)?; + (Some(tracker), duplicates) + }; + + fs::create_dir_all(&dst)?; + + // Copy all binaries to a temporary directory under `dst` first, catching + // some failure modes (e.g., out of space) before touching the existing + // binaries. This directory will get cleaned up via RAII. + let staging_dir = TempFileBuilder::new() + .prefix("cargo-install") + .tempdir_in(&dst)?; + for &(bin, src) in binaries.iter() { + let dst = staging_dir.path().join(bin); + // Try to move if `target_dir` is transient. + if !source_id.is_path() && fs::rename(src, &dst).is_ok() { + continue; + } + fs::copy(src, &dst).chain_err(|| { + format_err!("failed to copy `{}` to `{}`", src.display(), dst.display()) + })?; + } + + let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries + .iter() + .map(|&(bin, _)| bin) + .partition(|&bin| duplicates.contains_key(bin)); + + let mut installed = Transaction { bins: Vec::new() }; + let mut successful_bins = BTreeSet::new(); + + // Move the temporary copies into `dst` starting with new binaries. + for bin in to_install.iter() { + let src = staging_dir.path().join(bin); + let dst = dst.join(bin); + config.shell().status("Installing", dst.display())?; + fs::rename(&src, &dst).chain_err(|| { + format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + })?; + installed.bins.push(dst); + successful_bins.insert(bin.to_string()); + } + + // Repeat for binaries which replace existing ones but don't pop the error + // up until after updating metadata. + let replace_result = { + let mut try_install = || -> CargoResult<()> { + for &bin in to_replace.iter() { + let src = staging_dir.path().join(bin); + let dst = dst.join(bin); + config.shell().status("Replacing", dst.display())?; + fs::rename(&src, &dst).chain_err(|| { + format_err!("failed to move `{}` to `{}`", src.display(), dst.display()) + })?; + successful_bins.insert(bin.to_string()); + } + Ok(()) + }; + try_install() + }; + + if let Some(mut tracker) = tracker { + tracker.mark_installed( + pkg, + &successful_bins, + vers.map(|s| s.to_string()), + opts, + target, + rustc.verbose_version, + ); + + match tracker.save() { + Err(err) => replace_result.chain_err(|| err)?, + Ok(_) => replace_result?, + } + } + + // Reaching here means all actions have succeeded. Clean up. + installed.success(); + if needs_cleanup { + // Don't bother grabbing a lock as we're going to blow it all away + // anyway. + let target_dir = ws.target_dir().into_path_unlocked(); + paths::remove_dir_all(&target_dir)?; + } + + // Helper for creating status messages. + fn executables>(mut names: impl Iterator + Clone) -> String { + if names.clone().count() == 1 { + format!("(executable `{}`)", names.next().unwrap().as_ref()) + } else { + format!( + "(executables {})", + names + .map(|b| format!("`{}`", b.as_ref())) + .collect::>() + .join(", ") + ) + } + } + + if duplicates.is_empty() { + config.shell().status( + "Installed", + format!("package `{}` {}", pkg, executables(successful_bins.iter())), + )?; + Ok(()) + } else { + if !to_install.is_empty() { + config.shell().status( + "Installed", + format!("package `{}` {}", pkg, executables(to_install.iter())), + )?; + } + // Invert the duplicate map. 
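+    // A small standalone sketch of this inversion (strings standing in for
+    // `PackageId`s): group the replaced binary names by the package that
+    // previously owned them, using "unknown" when the owner wasn't tracked.
+    fn invert_duplicates(dups: &BTreeMap<String, Option<String>>) -> BTreeMap<String, Vec<String>> {
+        let mut by_pkg: BTreeMap<String, Vec<String>> = BTreeMap::new();
+        for (bin, owner) in dups {
+            let key = owner.clone().unwrap_or_else(|| "unknown".to_string());
+            by_pkg.entry(key).or_insert_with(Vec::new).push(bin.clone());
+        }
+        by_pkg
+    }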
+ let mut pkg_map = BTreeMap::new(); + for (bin_name, opt_pkg_id) in &duplicates { + let key = opt_pkg_id.map_or_else(|| "unknown".to_string(), |pkg_id| pkg_id.to_string()); + pkg_map.entry(key).or_insert_with(Vec::new).push(bin_name); + } + for (pkg_descr, bin_names) in &pkg_map { + config.shell().status( + "Replaced", + format!( + "package `{}` with `{}` {}", + pkg_descr, + pkg, + executables(bin_names.iter()) + ), + )?; + } + Ok(()) + } +} + +fn check_yanked_install(ws: &Workspace<'_>) -> CargoResult<()> { + if ws.ignore_lock() || !ws.root().join("Cargo.lock").exists() { + return Ok(()); + } + let specs = vec![PackageIdSpec::from_package_id(ws.current()?.package_id())]; + // It would be best if `source` could be passed in here to avoid a + // duplicate "Updating", but since `source` is taken by value, then it + // wouldn't be available for `compile_ws`. + let (pkg_set, resolve) = ops::resolve_ws_with_opts(ws, ResolveOpts::everything(), &specs)?; + let mut sources = pkg_set.sources_mut(); + + // Checking the yanked status involves taking a look at the registry and + // maybe updating files, so be sure to lock it here. + let _lock = ws.config().acquire_package_cache_lock()?; + + for pkg_id in resolve.iter() { + if let Some(source) = sources.get_mut(pkg_id.source_id()) { + if source.is_yanked(pkg_id)? { + ws.config().shell().warn(format!( + "package `{}` in Cargo.lock is yanked in registry `{}`, \ + consider running without --locked", + pkg_id, + pkg_id.source_id().display_registry_name() + ))?; + } + } + } + + Ok(()) +} + +/// Display a list of installed binaries. +pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { + let root = resolve_root(dst, config)?; + let tracker = InstallTracker::load(config, &root)?; + for (k, v) in tracker.all_installed_bins() { + println!("{}:", k); + for bin in v { + println!(" {}", bin); + } + } + Ok(()) +} diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index c8b2fdca6d4..9f935d1df88 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -1,41 +1,104 @@ +use std::collections::BTreeMap; use std::env; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::io; -use std::path::Path; - -use rustc_serialize::{Decodable, Decoder}; +use std::fmt; +use std::fs; +use std::io::{BufRead, BufReader, ErrorKind}; +use std::path::{Path, PathBuf}; use git2::Config as GitConfig; +use git2::Repository as GitRepository; -use term::color::BLACK; - -use util::{GitRepo, HgRepo, CargoResult, human, ChainError, internal}; -use util::Config; +use crate::core::{compiler, Workspace}; +use crate::util::errors::{self, CargoResult, CargoResultExt}; +use crate::util::{existing_vcs_repo, internal, FossilRepo, GitRepo, HgRepo, PijulRepo}; +use crate::util::{paths, validate_package_name, Config}; use toml; #[derive(Clone, Copy, Debug, PartialEq)] -pub enum VersionControl { Git, Hg, NoVcs } +pub enum VersionControl { + Git, + Hg, + Pijul, + Fossil, + NoVcs, +} -pub struct NewOptions<'a> { +#[derive(Debug)] +pub struct NewOptions { pub version_control: Option, - pub bin: bool, - pub path: &'a str, - pub name: Option<&'a str>, -} - -impl Decodable for VersionControl { - fn decode(d: &mut D) -> Result { - Ok(match &try!(d.read_str())[..] 
{ - "git" => VersionControl::Git, - "hg" => VersionControl::Hg, - "none" => VersionControl::NoVcs, - n => { - let err = format!("could not decode '{}' as version control", n); - return Err(d.error(&err)); - } - }) + pub kind: NewProjectKind, + /// Absolute path to the directory for the new package + pub path: PathBuf, + pub name: Option, + pub edition: Option, + pub registry: Option, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum NewProjectKind { + Bin, + Lib, +} + +impl NewProjectKind { + fn is_bin(self) -> bool { + self == NewProjectKind::Bin + } +} + +impl fmt::Display for NewProjectKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + NewProjectKind::Bin => "binary (application)", + NewProjectKind::Lib => "library", + } + .fmt(f) + } +} + +struct SourceFileInformation { + relative_path: String, + target_name: String, + bin: bool, +} + +struct MkOptions<'a> { + version_control: Option, + path: &'a Path, + name: &'a str, + source_files: Vec, + bin: bool, + edition: Option<&'a str>, + registry: Option<&'a str>, +} + +impl NewOptions { + pub fn new( + version_control: Option, + bin: bool, + lib: bool, + path: PathBuf, + name: Option, + edition: Option, + registry: Option, + ) -> CargoResult { + let kind = match (bin, lib) { + (true, true) => failure::bail!("can't specify both lib and binary outputs"), + (false, true) => NewProjectKind::Lib, + // default to bin + (_, false) => NewProjectKind::Bin, + }; + + let opts = NewOptions { + version_control, + kind, + path, + name, + edition, + registry, + }; + Ok(opts) } } @@ -45,194 +108,674 @@ struct CargoNewConfig { version_control: Option, } -pub fn new(opts: NewOptions, config: &Config) -> CargoResult<()> { - let path = config.cwd().join(opts.path); - if fs::metadata(&path).is_ok() { - return Err(human(format!("Destination `{}` already exists", - path.display()))) +fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> { + if let Some(ref name) = opts.name { + return Ok(name); } - let name = match opts.name { - Some(name) => name, - None => { - let dir_name = try!(path.file_name().and_then(|s| s.to_str()).chain_error(|| { - human(&format!("cannot create a project with a non-unicode name: {:?}", - path.file_name().unwrap())) - })); - if opts.bin { - dir_name - } else { - let new_name = strip_rust_affixes(dir_name); - if new_name != dir_name { - let message = format!( - "note: package will be named `{}`; use --name to override", - new_name); - try!(config.shell().say(&message, BLACK)); + + let file_name = path.file_name().ok_or_else(|| { + failure::format_err!( + "cannot auto-detect package name from path {:?} ; use --name to override", + path.as_os_str() + ) + })?; + + file_name.to_str().ok_or_else(|| { + failure::format_err!( + "cannot create package with a non-unicode name: {:?}", + file_name + ) + }) +} + +fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> { + // If --name is already used to override, no point in suggesting it + // again as a fix. 
+ let name_help = match opts.name { + Some(_) => "", + None => "\nuse --name to override crate name", + }; + + // Ban keywords + test list found at + // https://doc.rust-lang.org/grammar.html#keywords + let blacklist = [ + "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do", + "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop", + "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", + "pure", "ref", "return", "self", "sizeof", "static", "struct", "super", "test", "trait", + "true", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield", + ]; + if blacklist.contains(&name) || (opts.kind.is_bin() && compiler::is_bad_artifact_name(name)) { + failure::bail!( + "The name `{}` cannot be used as a crate name{}", + name, + name_help + ) + } + + if let Some(ref c) = name.chars().nth(0) { + if c.is_digit(10) { + failure::bail!( + "Package names starting with a digit cannot be used as a crate name{}", + name_help + ) + } + } + + validate_package_name(name, "crate name", name_help)?; + Ok(()) +} + +fn detect_source_paths_and_types( + package_path: &Path, + package_name: &str, + detected_files: &mut Vec, +) -> CargoResult<()> { + let path = package_path; + let name = package_name; + + enum H { + Bin, + Lib, + Detect, + } + + struct Test { + proposed_path: String, + handling: H, + } + + let tests = vec![ + Test { + proposed_path: "src/main.rs".to_string(), + handling: H::Bin, + }, + Test { + proposed_path: "main.rs".to_string(), + handling: H::Bin, + }, + Test { + proposed_path: format!("src/{}.rs", name), + handling: H::Detect, + }, + Test { + proposed_path: format!("{}.rs", name), + handling: H::Detect, + }, + Test { + proposed_path: "src/lib.rs".to_string(), + handling: H::Lib, + }, + Test { + proposed_path: "lib.rs".to_string(), + handling: H::Lib, + }, + ]; + + for i in tests { + let pp = i.proposed_path; + + // path/pp does not exist or is not a file + if !fs::metadata(&path.join(&pp)) + .map(|x| x.is_file()) + .unwrap_or(false) + { + continue; + } + + let sfi = match i.handling { + H::Bin => SourceFileInformation { + relative_path: pp, + target_name: package_name.to_string(), + bin: true, + }, + H::Lib => SourceFileInformation { + relative_path: pp, + target_name: package_name.to_string(), + bin: false, + }, + H::Detect => { + let content = paths::read(&path.join(pp.clone()))?; + let isbin = content.contains("fn main"); + SourceFileInformation { + relative_path: pp, + target_name: package_name.to_string(), + bin: isbin, } - new_name } + }; + detected_files.push(sfi); + } + + // Check for duplicate lib attempt + + let mut previous_lib_relpath: Option<&str> = None; + let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new(); + + for i in detected_files { + if i.bin { + if let Some(x) = BTreeMap::get::(&duplicates_checker, i.target_name.as_ref()) { + failure::bail!( + "\ +multiple possible binary sources found: + {} + {} +cannot automatically generate Cargo.toml as the main target would be ambiguous", + &x.relative_path, + &i.relative_path + ); + } + duplicates_checker.insert(i.target_name.as_ref(), i); + } else { + if let Some(plp) = previous_lib_relpath { + failure::bail!( + "cannot have a package with \ + multiple libraries, \ + found both `{}` and `{}`", + plp, + i.relative_path + ) + } + previous_lib_relpath = Some(&i.relative_path); } - }; - for c in name.chars() { - if c.is_alphanumeric() { continue } - if c == '_' || c == '-' { 
continue } - return Err(human(&format!("Invalid character `{}` in crate name: `{}`", - c, name))); - } - mk(config, &path, name, &opts).chain_error(|| { - human(format!("Failed to create project `{}` at `{}`", - name, path.display())) - }) + } + + Ok(()) } -fn strip_rust_affixes(name: &str) -> &str { - for &prefix in &["rust-", "rust_", "rs-", "rs_"] { - if name.starts_with(prefix) { - return &name[prefix.len()..]; +fn plan_new_source_file(bin: bool, package_name: String) -> SourceFileInformation { + if bin { + SourceFileInformation { + relative_path: "src/main.rs".to_string(), + target_name: package_name, + bin: true, } - } - for &suffix in &["-rust", "_rust", "-rs", "_rs"] { - if name.ends_with(suffix) { - return &name[..name.len()-suffix.len()]; + } else { + SourceFileInformation { + relative_path: "src/lib.rs".to_string(), + target_name: package_name, + bin: false, } } - name } -fn existing_vcs_repo(path: &Path) -> bool { - GitRepo::discover(path).is_ok() || HgRepo::discover(path).is_ok() +pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { + let path = &opts.path; + if fs::metadata(path).is_ok() { + failure::bail!( + "destination `{}` already exists\n\n\ + Use `cargo init` to initialize the directory", + path.display() + ) + } + + let name = get_name(path, opts)?; + check_name(name, opts)?; + + let mkopts = MkOptions { + version_control: opts.version_control, + path, + name, + source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())], + bin: opts.kind.is_bin(), + edition: opts.edition.as_ref().map(|s| &**s), + registry: opts.registry.as_ref().map(|s| &**s), + }; + + mk(config, &mkopts).chain_err(|| { + failure::format_err!( + "Failed to create package `{}` at `{}`", + name, + path.display() + ) + })?; + Ok(()) } -fn file(p: &Path, contents: &[u8]) -> io::Result<()> { - try!(File::create(p)).write_all(contents) +pub fn init(opts: &NewOptions, config: &Config) -> CargoResult<()> { + let path = &opts.path; + + if fs::metadata(&path.join("Cargo.toml")).is_ok() { + failure::bail!("`cargo init` cannot be run on existing Cargo packages") + } + + let name = get_name(path, opts)?; + check_name(name, opts)?; + + let mut src_paths_types = vec![]; + + detect_source_paths_and_types(path, name, &mut src_paths_types)?; + + if src_paths_types.is_empty() { + src_paths_types.push(plan_new_source_file(opts.kind.is_bin(), name.to_string())); + } else { + // --bin option may be ignored if lib.rs or src/lib.rs present + // Maybe when doing `cargo init --bin` inside a library package stub, + // user may mean "initialize for library, but also add binary target" + } + + let mut version_control = opts.version_control; + + if version_control == None { + let mut num_detected_vsces = 0; + + if fs::metadata(&path.join(".git")).is_ok() { + version_control = Some(VersionControl::Git); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".hg")).is_ok() { + version_control = Some(VersionControl::Hg); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".pijul")).is_ok() { + version_control = Some(VersionControl::Pijul); + num_detected_vsces += 1; + } + + if fs::metadata(&path.join(".fossil")).is_ok() { + version_control = Some(VersionControl::Fossil); + num_detected_vsces += 1; + } + + // if none exists, maybe create git, like in `cargo new` + + if num_detected_vsces > 1 { + failure::bail!( + "more than one of .hg, .git, .pijul, .fossil configurations \ + found and the ignore file can't be filled in as \ + a result. 
specify --vcs to override detection" + ); + } + } + + let mkopts = MkOptions { + version_control, + path, + name, + bin: src_paths_types.iter().any(|x| x.bin), + source_files: src_paths_types, + edition: opts.edition.as_ref().map(|s| &**s), + registry: opts.registry.as_ref().map(|s| &**s), + }; + + mk(config, &mkopts).chain_err(|| { + failure::format_err!( + "Failed to create package `{}` at `{}`", + name, + path.display() + ) + })?; + Ok(()) } -fn mk(config: &Config, path: &Path, name: &str, - opts: &NewOptions) -> CargoResult<()> { - let cfg = try!(global_config(config)); - let mut ignore = "target\n".to_string(); - let in_existing_vcs_repo = existing_vcs_repo(path.parent().unwrap()); - if !opts.bin { - ignore.push_str("Cargo.lock\n"); +/// IgnoreList +struct IgnoreList { + /// git like formatted entries + ignore: Vec, + /// mercurial formatted entries + hg_ignore: Vec, +} + +impl IgnoreList { + /// constructor to build a new ignore file + fn new() -> IgnoreList { + IgnoreList { + ignore: Vec::new(), + hg_ignore: Vec::new(), + } + } + + /// add a new entry to the ignore list. Requires two arguments with the + /// entry in two different formats. One for "git style" entries and one for + /// "mercurial like" entries. + fn push(&mut self, ignore: &str, hg_ignore: &str) { + self.ignore.push(ignore.to_string()); + self.hg_ignore.push(hg_ignore.to_string()); + } + + /// Return the correctly formatted content of the ignore file for the given + /// version control system as `String`. + fn format_new(&self, vcs: VersionControl) -> String { + let ignore_items = match vcs { + VersionControl::Hg => &self.hg_ignore, + _ => &self.ignore, + }; + + ignore_items.join("\n") + "\n" + } + + /// format_existing is used to format the IgnoreList when the ignore file + /// already exists. It reads the contents of the given `BufRead` and + /// checks if the contents of the ignore list are already existing in the + /// file. + fn format_existing(&self, existing: T, vcs: VersionControl) -> String { + // TODO: is unwrap safe? + let existing_items = existing.lines().collect::, _>>().unwrap(); + + let ignore_items = match vcs { + VersionControl::Hg => &self.hg_ignore, + _ => &self.ignore, + }; + + let mut out = "\n\n#Added by cargo\n\ + #\n\ + #already existing elements are commented out\n\n" + .to_string(); + + for item in ignore_items { + if existing_items.contains(item) { + out.push('#'); + } + out.push_str(item); + out.push('\n'); + } + + out } +} + +/// Writes the ignore file to the given directory. If the ignore file for the +/// given vcs system already exists, its content is read and duplicate ignore +/// file entries are filtered out. 
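+/// Fossil and `--vcs none` have no ignore file to write, so an empty string
+/// is returned; Git, Mercurial, and Pijul each get their own file name
+/// (`.gitignore`, `.hgignore`, `.ignore`) and entry format.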
+fn write_ignore_file( + base_path: &Path, + list: &IgnoreList, + vcs: VersionControl, +) -> CargoResult { + let fp_ignore = match vcs { + VersionControl::Git => base_path.join(".gitignore"), + VersionControl::Hg => base_path.join(".hgignore"), + VersionControl::Pijul => base_path.join(".ignore"), + VersionControl::Fossil => return Ok("".to_string()), + VersionControl::NoVcs => return Ok("".to_string()), + }; - let vcs = match (opts.version_control, cfg.version_control, in_existing_vcs_repo) { - (None, None, false) => VersionControl::Git, - (None, Some(option), false) => option, - (Some(option), _, false) => option, - (_, _, true) => VersionControl::NoVcs, + let ignore: String = match fs::File::open(&fp_ignore) { + Err(why) => match why.kind() { + ErrorKind::NotFound => list.format_new(vcs), + _ => return Err(failure::format_err!("{}", why)), + }, + Ok(file) => list.format_existing(BufReader::new(file), vcs), }; + paths::append(&fp_ignore, ignore.as_bytes())?; + + Ok(ignore) +} + +/// Initializes the correct VCS system based on the provided config. +fn init_vcs(path: &Path, vcs: VersionControl, config: &Config) -> CargoResult<()> { match vcs { VersionControl::Git => { - try!(GitRepo::init(path)); - try!(file(&path.join(".gitignore"), ignore.as_bytes())); - }, + if !path.join(".git").exists() { + // Temporary fix to work around bug in libgit2 when creating a + // directory in the root of a posix filesystem. + // See: https://github.com/libgit2/libgit2/issues/5130 + fs::create_dir_all(path)?; + GitRepo::init(path, config.cwd())?; + } + } VersionControl::Hg => { - try!(HgRepo::init(path)); - try!(file(&path.join(".hgignore"), ignore.as_bytes())); - }, + if !path.join(".hg").exists() { + HgRepo::init(path, config.cwd())?; + } + } + VersionControl::Pijul => { + if !path.join(".pijul").exists() { + PijulRepo::init(path, config.cwd())?; + } + } + VersionControl::Fossil => { + if !path.join(".fossil").exists() { + FossilRepo::init(path, config.cwd())?; + } + } VersionControl::NoVcs => { - try!(fs::create_dir(path)); - }, + fs::create_dir_all(path)?; + } }; - let (author_name, email) = try!(discover_author()); - // Hoo boy, sure glad we've got exhaustivenes checking behind us. + Ok(()) +} + +fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> { + let path = opts.path; + let name = opts.name; + let cfg = global_config(config)?; + + // Using the push method with two arguments ensures that the entries for + // both `ignore` and `hgignore` are in sync. 
+ let mut ignore = IgnoreList::new(); + ignore.push("/target", "^target/"); + ignore.push("**/*.rs.bk", "glob:*.rs.bk"); + if !opts.bin { + ignore.push("Cargo.lock", "glob:Cargo.lock"); + } + + let vcs = opts.version_control.unwrap_or_else(|| { + let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd()); + match (cfg.version_control, in_existing_vcs) { + (None, false) => VersionControl::Git, + (Some(opt), false) => opt, + (_, true) => VersionControl::NoVcs, + } + }); + + init_vcs(path, vcs, config)?; + write_ignore_file(path, &ignore, vcs)?; + + let (author_name, email) = discover_author()?; let author = match (cfg.name, cfg.email, author_name, email) { - (Some(name), Some(email), _, _) | - (Some(name), None, _, Some(email)) | - (None, Some(email), name, _) | - (None, None, name, Some(email)) => format!("{} <{}>", name, email), - (Some(name), None, _, None) | - (None, None, name, None) => name, + (Some(name), Some(email), _, _) + | (Some(name), None, _, Some(email)) + | (None, Some(email), name, _) + | (None, None, name, Some(email)) => { + if email.is_empty() { + name + } else { + format!("{} <{}>", name, email) + } + } + (Some(name), None, _, None) | (None, None, name, None) => name, }; - try!(file(&path.join("Cargo.toml"), format!( -r#"[package] + let mut cargotoml_path_specifier = String::new(); + + // Calculate what `[lib]` and `[[bin]]`s we need to append to `Cargo.toml`. + + for i in &opts.source_files { + if i.bin { + if i.relative_path != "src/main.rs" { + cargotoml_path_specifier.push_str(&format!( + r#" +[[bin]] +name = "{}" +path = {} +"#, + i.target_name, + toml::Value::String(i.relative_path.clone()) + )); + } + } else if i.relative_path != "src/lib.rs" { + cargotoml_path_specifier.push_str(&format!( + r#" +[lib] +name = "{}" +path = {} +"#, + i.target_name, + toml::Value::String(i.relative_path.clone()) + )); + } + } + + // Create `Cargo.toml` file with necessary `[lib]` and `[[bin]]` sections, if needed. + + paths::write( + &path.join("Cargo.toml"), + format!( + r#"[package] name = "{}" version = "0.1.0" authors = [{}] -"#, name, toml::Value::String(author)).as_bytes())); +edition = {} +{} +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - try!(fs::create_dir(&path.join("src"))); +[dependencies] +{}"#, + name, + toml::Value::String(author), + match opts.edition { + Some(edition) => toml::Value::String(edition.to_string()), + None => toml::Value::String("2018".to_string()), + }, + match opts.registry { + Some(registry) => format!( + "publish = {}\n", + toml::Value::Array(vec!(toml::Value::String(registry.to_string()))) + ), + None => "".to_string(), + }, + cargotoml_path_specifier + ) + .as_bytes(), + )?; - if opts.bin { - try!(file(&path.join("src/main.rs"), b"\ + // Create all specified source files (with respective parent directories) if they don't exist. 
+
+    for i in &opts.source_files {
+        let path_of_source_file = path.join(i.relative_path.clone());
+
+        if let Some(src_dir) = path_of_source_file.parent() {
+            fs::create_dir_all(src_dir)?;
+        }
+
+        let default_file_content: &[u8] = if i.bin {
+            b"\
 fn main() {
     println!(\"Hello, world!\");
 }
-"));
-    } else {
-        try!(file(&path.join("src/lib.rs"), b"\
-#[test]
-fn it_works() {
+"
+        } else {
+            b"\
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn it_works() {
+        assert_eq!(2 + 2, 4);
+    }
 }
-"));
+"
+        };
+
+        if !fs::metadata(&path_of_source_file)
+            .map(|x| x.is_file())
+            .unwrap_or(false)
+        {
+            paths::write(&path_of_source_file, default_file_content)?;
+        }
+    }
+
+    if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
+        let msg = format!(
+            "compiling this new crate may not work due to invalid \
+             workspace configuration\n\n{}",
+            errors::display_causes(&e)
+        );
+        config.shell().warn(msg)?;
     }

     Ok(())
 }

+fn get_environment_variable(variables: &[&str]) -> Option<String> {
+    variables.iter().filter_map(|var| env::var(var).ok()).next()
+}
+
 fn discover_author() -> CargoResult<(String, Option<String>)> {
-    let git_config = GitConfig::open_default().ok();
+    let cwd = env::current_dir()?;
+    let git_config = if let Ok(repo) = GitRepository::discover(&cwd) {
+        repo.config()
+            .ok()
+            .or_else(|| GitConfig::open_default().ok())
+    } else {
+        GitConfig::open_default().ok()
+    };
     let git_config = git_config.as_ref();
-    let name = git_config.and_then(|g| g.get_string("user.name").ok())
-        .map(|s| s.to_string())
-        .or_else(|| env::var("USER").ok()) // unix
-        .or_else(|| env::var("USERNAME").ok()); // windows
+    let name_variables = [
+        "CARGO_NAME",
+        "GIT_AUTHOR_NAME",
+        "GIT_COMMITTER_NAME",
+        "USER",
+        "USERNAME",
+        "NAME",
+    ];
+    let name = get_environment_variable(&name_variables[0..3])
+        .or_else(|| git_config.and_then(|g| g.get_string("user.name").ok()))
+        .or_else(|| get_environment_variable(&name_variables[3..]));
+
     let name = match name {
         Some(name) => name,
         None => {
-            let username_var = if cfg!(windows) {"USERNAME"} else {"USER"};
-            return Err(human(format!("could not determine the current \
-                                      user, please set ${}", username_var)))
+            let username_var = if cfg!(windows) { "USERNAME" } else { "USER" };
+            failure::bail!(
+                "could not determine the current user, please set ${}",
+                username_var
+            )
         }
     };
-    let email = git_config.and_then(|g| g.get_string("user.email").ok())
-        .or_else(|| env::var("EMAIL").ok());
+    let email_variables = [
+        "CARGO_EMAIL",
+        "GIT_AUTHOR_EMAIL",
+        "GIT_COMMITTER_EMAIL",
+        "EMAIL",
+    ];
+    let email = get_environment_variable(&email_variables[0..3])
+        .or_else(|| git_config.and_then(|g| g.get_string("user.email").ok()))
+        .or_else(|| get_environment_variable(&email_variables[3..]));

     let name = name.trim().to_string();
-    let email = email.map(|s| s.trim().to_string());
+    let email = email.map(|s| {
+        let mut s = s.trim();
+
+        // In some cases the email will already be wrapped in `<>`; strip the
+        // brackets here, since they are re-added later where needed.
+        if s.starts_with('<') && s.ends_with('>') {
+            s = &s[1..s.len() - 1];
+        }
+
+        s.to_string()
+    });

     Ok((name, email))
 }

 fn global_config(config: &Config) -> CargoResult<CargoNewConfig> {
-    let name = try!(config.get_string("cargo-new.name")).map(|s| s.0);
-    let email = try!(config.get_string("cargo-new.email")).map(|s| s.0);
-    let vcs = try!(config.get_string("cargo-new.vcs"));
+    let name = config.get_string("cargo-new.name")?.map(|s| s.val);
+    let email = config.get_string("cargo-new.email")?.map(|s| s.val);
+    let vcs = config.get_string("cargo-new.vcs")?;

-    let vcs = match vcs.as_ref().map(|p| (&p.0[..], &p.1)) {
+    let vcs = match vcs.as_ref().map(|p| (&p.val[..], &p.definition)) {
         Some(("git", _)) => Some(VersionControl::Git),
         Some(("hg", _)) => Some(VersionControl::Hg),
+        Some(("pijul", _)) => Some(VersionControl::Pijul),
         Some(("none", _)) => Some(VersionControl::NoVcs),
         Some((s, p)) => {
-            return Err(internal(format!("invalid configuration for key \
-                                         `cargo-new.vcs`, unknown vcs `{}` \
-                                         (found in {:?})", s, p)))
+            return Err(internal(format!(
+                "invalid configuration for key \
+                 `cargo-new.vcs`, unknown vcs `{}` \
+                 (found in {})",
+                s, p
+            )));
         }
-        None => None
+        None => None,
     };
     Ok(CargoNewConfig {
-        name: name,
-        email: email,
+        name,
+        email,
         version_control: vcs,
     })
 }
-
-#[cfg(test)]
-mod tests {
-    use super::strip_rust_affixes;
-
-    #[test]
-    fn affixes_stripped() {
-        assert_eq!(strip_rust_affixes("rust-foo"), "foo");
-        assert_eq!(strip_rust_affixes("foo-rs"), "foo");
-        assert_eq!(strip_rust_affixes("rs_foo"), "foo");
-        // Only one affix is stripped
-        assert_eq!(strip_rust_affixes("rs-foo-rs"), "foo-rs");
-        assert_eq!(strip_rust_affixes("foo-rs-rs"), "foo-rs");
-        // It shouldn't touch the middle
-        assert_eq!(strip_rust_affixes("some-rust-crate"), "some-rust-crate");
-    }
-}
diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs
new file mode 100644
index 00000000000..f782414cace
--- /dev/null
+++ b/src/cargo/ops/cargo_output_metadata.rs
@@ -0,0 +1,136 @@
+use std::collections::HashMap;
+use std::path::PathBuf;
+
+use serde::ser;
+use serde::Serialize;
+
+use crate::core::resolver::{Resolve, ResolveOpts};
+use crate::core::{Package, PackageId, Workspace};
+use crate::ops::{self, Packages};
+use crate::util::CargoResult;
+
+const VERSION: u32 = 1;
+
+pub struct OutputMetadataOptions {
+    pub features: Vec<String>,
+    pub no_default_features: bool,
+    pub all_features: bool,
+    pub no_deps: bool,
+    pub version: u32,
+}
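// A minimal sketch of how an external tool might consume this JSON format
// (illustrative only; assumes the `serde` and `serde_json` crates, and
// mirrors a small subset of the `ExportInfo` fields defined below).
fn _metadata_consumer_demo() -> Result<(), Box<dyn std::error::Error>> {
    #[derive(serde::Deserialize)]
    struct Metadata {
        workspace_root: std::path::PathBuf,
        version: u32,
    }

    let out = std::process::Command::new("cargo")
        .args(&["metadata", "--format-version", "1", "--no-deps"])
        .output()?;
    let md: Metadata = serde_json::from_slice(&out.stdout)?;
    assert_eq!(md.version, 1); // same constant this module pins as VERSION
    println!("workspace root: {}", md.workspace_root.display());
    Ok(())
}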
+/// Loads the manifest, resolves the dependencies of the package to the concrete
+/// used versions - considering overrides - and writes all dependencies in a JSON
+/// format to stdout.
+pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    if opt.version != VERSION {
+        failure::bail!(
+            "metadata version {} not supported, only {} is currently supported",
+            opt.version,
+            VERSION
+        );
+    }
+    if opt.no_deps {
+        metadata_no_deps(ws, opt)
+    } else {
+        metadata_full(ws, opt)
+    }
+}
+
+fn metadata_no_deps(ws: &Workspace<'_>, _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    Ok(ExportInfo {
+        packages: ws.members().cloned().collect(),
+        workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(),
+        resolve: None,
+        target_directory: ws.target_dir().into_path_unlocked(),
+        version: VERSION,
+        workspace_root: ws.root().to_path_buf(),
+    })
+}
+
+fn metadata_full(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    let specs = Packages::All.to_package_id_specs(ws)?;
+    let opts = ResolveOpts::new(
+        /*dev_deps*/ true,
+        &opt.features,
+        opt.all_features,
+        !opt.no_default_features,
+    );
+    let (package_set, resolve) = ops::resolve_ws_with_opts(ws, opts, &specs)?;
+    let mut packages = HashMap::new();
+    for pkg in package_set.get_many(package_set.package_ids())? {
+        packages.insert(pkg.package_id(), pkg.clone());
+    }
+
+    Ok(ExportInfo {
+        packages: packages.values().map(|p| (*p).clone()).collect(),
+        workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(),
+        resolve: Some(MetadataResolve {
+            resolve: (packages, resolve),
+            root: ws.current_opt().map(|pkg| pkg.package_id()),
+        }),
+        target_directory: ws.target_dir().into_path_unlocked(),
+        version: VERSION,
+        workspace_root: ws.root().to_path_buf(),
+    })
+}
+
+#[derive(Serialize)]
+pub struct ExportInfo {
+    packages: Vec<Package>,
+    workspace_members: Vec<PackageId>,
+    resolve: Option<MetadataResolve>,
+    target_directory: PathBuf,
+    version: u32,
+    workspace_root: PathBuf,
+}
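// A toy illustration of the `serialize_with` pattern used by
// `MetadataResolve` below: serde hands the field's value and the serializer
// to a free function, which lets a composite or foreign type be rendered
// without a `Serialize` impl of its own. (Illustrative only; names here are
// hypothetical.)
#[derive(serde::Serialize)]
struct _Demo {
    #[serde(serialize_with = "_as_hex")]
    id: u32,
}

fn _as_hex<S: serde::Serializer>(v: &u32, s: S) -> Result<S::Ok, S::Error> {
    // Render the number as a hex string instead of a JSON integer.
    s.serialize_str(&format!("{:#x}", v))
}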
+/// Newtype wrapper to provide a custom `Serialize` implementation.
+/// The one from lock file does not fit because it uses a non-standard
+/// format for `PackageId`s.
+#[derive(Serialize)]
+struct MetadataResolve {
+    #[serde(rename = "nodes", serialize_with = "serialize_resolve")]
+    resolve: (HashMap<PackageId, Package>, Resolve),
+    root: Option<PackageId>,
+}
+
+fn serialize_resolve<S>(
+    (packages, resolve): &(HashMap<PackageId, Package>, Resolve),
+    s: S,
+) -> Result<S::Ok, S::Error>
+where
+    S: ser::Serializer,
+{
+    #[derive(Serialize)]
+    struct Dep {
+        name: String,
+        pkg: PackageId,
+    }
+
+    #[derive(Serialize)]
+    struct Node<'a> {
+        id: PackageId,
+        dependencies: Vec<PackageId>,
+        deps: Vec<Dep>,
+        features: Vec<&'a str>,
+    }
+
+    s.collect_seq(resolve.iter().map(|id| {
+        Node {
+            id,
+            dependencies: resolve.deps(id).map(|(pkg, _deps)| pkg).collect(),
+            deps: resolve
+                .deps(id)
+                .filter_map(|(pkg, _deps)| {
+                    packages
+                        .get(&pkg)
+                        .and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib()))
+                        .and_then(|lib_target| resolve.extern_crate_name(id, pkg, lib_target).ok())
+                        .map(|name| Dep { name, pkg })
+                })
+                .collect(),
+            features: resolve.features_sorted(id),
+        }
+    }))
+}
diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs
index d607e61f537..f24332db887 100644
--- a/src/cargo/ops/cargo_package.rs
+++ b/src/cargo/ops/cargo_package.rs
@@ -1,75 +1,170 @@
-use std::io::prelude::*;
+use std::collections::{BTreeSet, HashMap};
 use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::SeekFrom;
 use std::path::{self, Path, PathBuf};
+use std::rc::Rc;
+use std::sync::Arc;

-use tar::Archive;
-use flate2::{GzBuilder, Compression};
 use flate2::read::GzDecoder;
+use flate2::{Compression, GzBuilder};
+use log::debug;
+use serde_json::{self, json};
+use tar::{Archive, Builder, EntryType, Header};
+use termcolor::Color;
+
+use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor};
+use crate::core::resolver::ResolveOpts;
+use crate::core::Feature;
+use crate::core::{
+    Package, PackageId, PackageIdSpec, PackageSet, Resolve, Source, SourceId, Verbosity, Workspace,
+};
+use crate::ops;
+use crate::sources::PathSource;
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::paths;
+use crate::util::toml::TomlManifest;
+use crate::util::{self, internal, Config, FileLock};

-use core::{Source, SourceId, Package, PackageId};
-use sources::PathSource;
-use util::{self, CargoResult, human, internal, ChainError, Config};
-use ops;
+pub struct PackageOpts<'cfg> {
+    pub config: &'cfg Config,
+    pub list: bool,
+    pub check_metadata: bool,
+    pub allow_dirty: bool,
+    pub verify: bool,
+    pub jobs: Option<u32>,
+    pub target: Option<String>,
+    pub features: Vec<String>,
+    pub all_features: bool,
+    pub no_default_features: bool,
+}

-struct Bomb { path: Option<PathBuf> }
+static VCS_INFO_FILE: &str = ".cargo_vcs_info.json";

-impl Drop for Bomb {
-    fn drop(&mut self) {
-        match self.path.as_ref() {
-            Some(path) => { let _ = fs::remove_file(path); }
-            None => {}
-        }
+pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Option<FileLock>> {
+    if ws.root().join("Cargo.lock").exists() {
+        // Make sure the Cargo.lock is up-to-date and valid.
+        ops::resolve_ws(ws)?;
+        // If Cargo.lock does not exist, it will be generated by `build_lock`
+        // below, and will be validated during the verification step.
     }
-}
+    let pkg = ws.current()?;
+    let config = ws.config();

-pub fn package(manifest_path: &Path,
-               config: &Config,
-               verify: bool,
-               list: bool,
-               metadata: bool) -> CargoResult<Option<PathBuf>> {
-    let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(),
-                                            config));
-    try!(src.update());
-    let pkg = try!(src.root_package());
+    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config);
+    src.update()?;

-    if metadata {
-        try!(check_metadata(&pkg, config));
+    if opts.check_metadata {
+        check_metadata(pkg, config)?;
     }

-    if list {
+    verify_dependencies(pkg)?;
+
+    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
+        config.shell().warn(
+            "both package.include and package.exclude are specified; \
+             the exclude list will be ignored",
+        )?;
+    }
+    // `list_files` outputs warnings as a side effect, so only do it once.
+    let src_files = src.list_files(pkg)?;
+
+    // Make sure a VCS info file is not included in source, regardless of if
+    // we produced the file above, and in particular if we did not.
+    check_vcs_file_collision(pkg, &src_files)?;
+
+    // Check (git) repository state, getting the current commit hash if not
+    // dirty. This will `bail!` if dirty, unless allow_dirty. Produce json
+    // info for any sha1 (HEAD revision) returned.
+    let vcs_info = if !opts.allow_dirty {
+        check_repo_state(pkg, &src_files, config, opts.allow_dirty)?
+            .map(|h| json!({"git":{"sha1": h}}))
+    } else {
+        None
+    };
+
+    if opts.list {
         let root = pkg.root();
-        let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| {
-            util::without_prefix(&file, &root).unwrap().to_path_buf()
-        }).collect();
-        list.sort();
+        let mut list: Vec<_> = src
+            .list_files(pkg)?
+            .iter()
+            .map(|file| file.strip_prefix(root).unwrap().to_path_buf())
+            .collect();
+        if pkg.include_lockfile() && !list.contains(&PathBuf::from("Cargo.lock")) {
+            // A generated Cargo.lock will be included.
+            list.push("Cargo.lock".into());
+        }
+        if vcs_info.is_some() {
+            list.push(Path::new(VCS_INFO_FILE).to_path_buf());
+        }
+        list.sort_unstable();
         for file in list.iter() {
             println!("{}", file.display());
         }
-        return Ok(None)
+        return Ok(None);
+    }
+
+    let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+    let dir = ws.target_dir().join("package");
+    let mut dst = {
+        let tmp = format!(".{}", filename);
+        dir.open_rw(&tmp, config, "package scratch space")?
+    };
+
+    // Package up and test a temporary tarball and only move it to the final
+    // location if it actually passes all our tests. Any previously existing
+    // tarball can be assumed as corrupt or invalid, so we just blow it away if
+    // it exists.
+    config
+        .shell()
+        .status("Packaging", pkg.package_id().to_string())?;
+    dst.file().set_len(0)?;
+    tar(ws, &src_files, vcs_info.as_ref(), dst.file(), &filename)
+        .chain_err(|| failure::format_err!("failed to prepare local package for uploading"))?;
+    if opts.verify {
+        dst.seek(SeekFrom::Start(0))?;
+        run_verify(ws, &dst, opts).chain_err(|| "failed to verify package tarball")?
     }
+    dst.seek(SeekFrom::Start(0))?;
+    {
+        let src_path = dst.path();
+        let dst_path = dst.parent().join(&filename);
+        fs::rename(&src_path, &dst_path)
+            .chain_err(|| "failed to move temporary tarball into final location")?;
+    }
+    Ok(Some(dst))
+}
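// The write-to-scratch-then-rename dance above is a common pattern for
// producing an artifact that readers never observe half-written. A
// stripped-down sketch using only std (illustrative; real code should also
// handle cross-filesystem renames):
fn _write_then_rename_demo(dir: &std::path::Path) -> std::io::Result<()> {
    use std::io::Write;

    let tmp = dir.join(".my-artifact.tmp");
    let fin = dir.join("my-artifact");

    let mut f = std::fs::File::create(&tmp)?;
    f.write_all(b"contents")?;
    f.sync_all()?; // flush to disk before the rename makes the file visible

    // On Unix, `rename` within one filesystem replaces `fin` in a single step.
    std::fs::rename(&tmp, &fin)
}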
+/// Construct `Cargo.lock` for the package to be published.
+fn build_lock(ws: &Workspace<'_>) -> CargoResult<String> {
+    let config = ws.config();
+    let orig_resolve = ops::load_pkg_lockfile(ws)?;

-    let filename = format!("package/{}-{}.crate", pkg.name(), pkg.version());
-    let target_dir = config.target_dir(&pkg);
-    let dst = target_dir.join(&filename);
-    if fs::metadata(&dst).is_ok() { return Ok(Some(dst)) }
+    // Convert Package -> TomlManifest -> Manifest -> Package
+    let orig_pkg = ws.current()?;
+    let toml_manifest = Rc::new(orig_pkg.manifest().original().prepare_for_publish(config)?);
+    let package_root = orig_pkg.root();
+    let source_id = orig_pkg.package_id().source_id();
+    let (manifest, _nested_paths) =
+        TomlManifest::to_real_manifest(&toml_manifest, source_id, package_root, config)?;
+    let new_pkg = Package::new(manifest, orig_pkg.manifest_path());

-    let mut bomb = Bomb { path: Some(dst.clone()) };
+    // Regenerate Cargo.lock using the old one as a guide.
+    let specs = vec![PackageIdSpec::from_package_id(new_pkg.package_id())];
+    let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?;
+    let (pkg_set, new_resolve) =
+        ops::resolve_ws_with_opts(&tmp_ws, ResolveOpts::everything(), &specs)?;

-    try!(config.shell().status("Packaging", pkg.package_id().to_string()));
-    try!(tar(&pkg, &src, config, &dst).chain_error(|| {
-        human("failed to prepare local package for uploading")
-    }));
-    if verify {
-        try!(run_verify(config, &pkg, &dst).chain_error(|| {
-            human("failed to verify package tarball")
-        }))
+    if let Some(orig_resolve) = orig_resolve {
+        compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
     }
-    Ok(Some(bomb.path.take().unwrap()))
+    check_yanked(config, &pkg_set, &new_resolve)?;
+
+    ops::resolve_to_string(&tmp_ws, &new_resolve)
 }

-// check that the package has some piece of metadata that a human can
+// Checks that the package has some piece of metadata that a human can
 // use to tell what the package is about.
-#[allow(deprecated)] // connect => join in 1.3
 fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
     let md = pkg.manifest().metadata();

@@ -84,114 +179,569 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> {
             )*
         }}
     }
-    lacking!(description, license || license_file, documentation || homepage || repository);
+    lacking!(
+        description,
+        license || license_file,
+        documentation || homepage || repository
+    );

     if !missing.is_empty() {
-        let mut things = missing[..missing.len() - 1].connect(", ");
-        // things will be empty if and only if length == 1 (i.e. the only case
+        let mut things = missing[..missing.len() - 1].join(", ");
+        // `things` will be empty if and only if its length is 1 (i.e., the only case
         // to have no `or`).
         if !things.is_empty() {
             things.push_str(" or ");
         }
-        things.push_str(&missing.last().unwrap());
+        things.push_str(missing.last().unwrap());

-        try!(config.shell().warn(
-            &format!("warning: manifest has no {things}. \
-                      See http://doc.crates.io/manifest.html#package-metadata for more info.",
-                     things = things)))
+        config.shell().warn(&format!(
+            "manifest has no {things}.\n\
+             See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
+            things = things
+        ))?
     }
     Ok(())
 }
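// How the warning text above composes, in isolation (a minimal sketch of the
// join logic only; illustrative):
fn _lacking_join_demo() {
    let missing = vec!["description", "license", "documentation"];
    // All but the last item are comma-separated; the last gets an "or".
    let mut things = missing[..missing.len() - 1].join(", ");
    if !things.is_empty() {
        things.push_str(" or ");
    }
    things.push_str(missing.last().unwrap());
    assert_eq!(things, "description, license or documentation");
}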
-fn tar(pkg: &Package, src: &PathSource, config: &Config,
-       dst: &Path) -> CargoResult<()> {
+// Checks that the package dependencies are safe to deploy.
+fn verify_dependencies(pkg: &Package) -> CargoResult<()> {
+    for dep in pkg.dependencies() {
+        if dep.source_id().is_path() && !dep.specified_req() {
+            failure::bail!(
+                "all path dependencies must have a version specified \
+                 when packaging.\ndependency `{}` does not specify \
+                 a version.",
+                dep.name_in_toml()
+            )
+        }
+    }
+    Ok(())
+}

-    if fs::metadata(&dst).is_ok() {
-        return Err(human(format!("destination already exists: {}",
-                                 dst.display())))
+// Checks if the package source is in a *git* DVCS repository. If *git*, and
+// the source is *dirty* (e.g., has uncommitted changes) and not `allow_dirty`
+// then `bail!` with an informative message. Otherwise return the sha1 hash of
+// the current *HEAD* commit, or `None` if *dirty*.
+fn check_repo_state(
+    p: &Package,
+    src_files: &[PathBuf],
+    config: &Config,
+    allow_dirty: bool,
+) -> CargoResult<Option<String>> {
+    if let Ok(repo) = git2::Repository::discover(p.root()) {
+        if let Some(workdir) = repo.workdir() {
+            debug!("found a git repo at {:?}", workdir);
+            let path = p.manifest_path();
+            let path = path.strip_prefix(workdir).unwrap_or(path);
+            if let Ok(status) = repo.status_file(path) {
+                if (status & git2::Status::IGNORED).is_empty() {
+                    debug!(
+                        "found (git) Cargo.toml at {:?} in workdir {:?}",
+                        path, workdir
+                    );
+                    return git(p, src_files, &repo, allow_dirty);
+                }
+            }
+            config.shell().verbose(|shell| {
+                shell.warn(format!(
+                    "No (git) Cargo.toml found at `{}` in workdir `{}`",
+                    path.display(),
+                    workdir.display()
+                ))
+            })?;
+        }
+    } else {
+        config.shell().verbose(|shell| {
+            shell.warn(format!("No (git) VCS found for `{}`", p.root().display()))
+        })?;
     }
-    try!(fs::create_dir_all(dst.parent().unwrap()));

+    // No VCS with a checked in `Cargo.toml` found, so we don't know if the
+    // directory is dirty or not, thus we have to assume that it's clean.
+    return Ok(None);

-    let tmpfile = try!(File::create(dst));
+    fn git(
+        p: &Package,
+        src_files: &[PathBuf],
+        repo: &git2::Repository,
+        allow_dirty: bool,
+    ) -> CargoResult<Option<String>> {
+        let workdir = repo.workdir().unwrap();
+        let dirty = src_files
+            .iter()
+            .filter(|file| {
+                let relative = file.strip_prefix(workdir).unwrap();
+                if let Ok(status) = repo.status_file(relative) {
+                    status != git2::Status::CURRENT
+                } else {
+                    false
+                }
+            })
+            .map(|path| {
+                path.strip_prefix(p.root())
+                    .unwrap_or(path)
+                    .display()
+                    .to_string()
+            })
+            .collect::<Vec<_>>();
+        if dirty.is_empty() {
+            let rev_obj = repo.revparse_single("HEAD")?;
+            Ok(Some(rev_obj.id().to_string()))
+        } else {
+            if !allow_dirty {
+                failure::bail!(
+                    "{} files in the working directory contain changes that were \
+                     not yet committed into git:\n\n{}\n\n\
+                     to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag",
+                    dirty.len(),
+                    dirty.join("\n")
+                )
+            }
+            Ok(None)
+        }
+    }
+}

-    // Prepare the encoder and its header
-    let filename = Path::new(dst.file_name().unwrap());
-    let encoder = GzBuilder::new().filename(try!(util::path2bytes(filename)))
-                                  .write(tmpfile, Compression::Best);
+// Checks for and `bail!` if a source file matches `ROOT/VCS_INFO_FILE`, since
+// this is now a Cargo reserved file name, and we don't want to allow forgery.
+fn check_vcs_file_collision(pkg: &Package, src_files: &[PathBuf]) -> CargoResult<()> { + let root = pkg.root(); + let vcs_info_path = Path::new(VCS_INFO_FILE); + let collision = src_files + .iter() + .find(|&p| p.strip_prefix(root).unwrap() == vcs_info_path); + if collision.is_some() { + failure::bail!( + "Invalid inclusion of reserved file name \ + {} in package source", + VCS_INFO_FILE + ); + } + Ok(()) +} + +fn tar( + ws: &Workspace<'_>, + src_files: &[PathBuf], + vcs_info: Option<&serde_json::Value>, + dst: &File, + filename: &str, +) -> CargoResult<()> { + // Prepare the encoder and its header. + let filename = Path::new(filename); + let encoder = GzBuilder::new() + .filename(util::path2bytes(filename)?) + .write(dst, Compression::best()); - // Put all package files into a compressed archive - let ar = Archive::new(encoder); + // Put all package files into a compressed archive. + let mut ar = Builder::new(encoder); + let pkg = ws.current()?; + let config = ws.config(); let root = pkg.root(); - for file in try!(src.list_files(pkg)).iter() { - if &**file == dst { continue } - let relative = util::without_prefix(&file, &root).unwrap(); - let relative = try!(relative.to_str().chain_error(|| { - human(format!("non-utf8 path in source directory: {}", - relative.display())) - })); - let mut file = try!(File::open(file)); - try!(config.shell().verbose(|shell| { - shell.status("Archiving", &relative) - })); - let path = format!("{}-{}{}{}", pkg.name(), pkg.version(), - path::MAIN_SEPARATOR, relative); - try!(ar.append_file(&path, &mut file).chain_error(|| { - internal(format!("could not archive source file `{}`", relative)) - })); - } - try!(ar.finish()); + + for src_file in src_files { + let relative = src_file.strip_prefix(root)?; + check_filename(relative)?; + let relative_str = relative.to_str().ok_or_else(|| { + failure::format_err!("non-utf8 path in source directory: {}", relative.display()) + })?; + if relative_str == "Cargo.lock" { + // This is added manually below. + continue; + } + config + .shell() + .verbose(|shell| shell.status("Archiving", &relative_str))?; + let path = format!( + "{}-{}{}{}", + pkg.name(), + pkg.version(), + path::MAIN_SEPARATOR, + relative_str + ); + + // The `tar::Builder` type by default will build GNU archives, but + // unfortunately we force it here to use UStar archives instead. The + // UStar format has more limitations on the length of path name that it + // can encode, so it's not quite as nice to use. + // + // Older cargos, however, had a bug where GNU archives were interpreted + // as UStar archives. This bug means that if we publish a GNU archive + // which has fully filled out metadata it'll be corrupt when unpacked by + // older cargos. + // + // Hopefully in the future after enough cargos have been running around + // with the bugfixed tar-rs library we'll be able to switch this over to + // GNU archives, but for now we'll just say that you can't encode paths + // in archives that are *too* long. + // + // For an instance of this in the wild, use the tar-rs 0.3.3 library to + // unpack the selectors 0.4.0 crate on crates.io. Either that or take a + // look at rust-lang/cargo#2326. 
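// A condensed sketch of the UStar-entry pattern used below, against the `tar`
// crate's `Builder`/`Header` API (illustrative only; buffer-backed writer and
// fixed metadata):
fn _ustar_entry_demo() -> std::io::Result<Vec<u8>> {
    use tar::{Builder, EntryType, Header};

    let mut ar = Builder::new(Vec::new());
    let body = b"hello";

    // `new_ustar` caps the encodable path length, but the result stays
    // readable by the older tar readers described above.
    let mut header = Header::new_ustar();
    header.set_path("pkg-0.1.0/hello.txt")?;
    header.set_entry_type(EntryType::file());
    header.set_mode(0o644);
    header.set_size(body.len() as u64);
    header.set_cksum(); // checksum must be computed over the finished header
    ar.append(&header, &body[..])?;

    ar.into_inner()
}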
+ let mut header = Header::new_ustar(); + header + .set_path(&path) + .chain_err(|| format!("failed to add to archive: `{}`", relative_str))?; + let mut file = File::open(src_file) + .chain_err(|| format!("failed to open for archiving: `{}`", src_file.display()))?; + let metadata = file + .metadata() + .chain_err(|| format!("could not learn metadata for: `{}`", relative_str))?; + header.set_metadata(&metadata); + + if relative_str == "Cargo.toml" { + let orig = Path::new(&path).with_file_name("Cargo.toml.orig"); + header.set_path(&orig)?; + header.set_cksum(); + ar.append(&header, &mut file).chain_err(|| { + internal(format!("could not archive source file `{}`", relative_str)) + })?; + + let mut header = Header::new_ustar(); + let toml = pkg.to_registry_toml(ws.config())?; + header.set_path(&path)?; + header.set_entry_type(EntryType::file()); + header.set_mode(0o644); + header.set_size(toml.len() as u64); + header.set_cksum(); + ar.append(&header, toml.as_bytes()).chain_err(|| { + internal(format!("could not archive source file `{}`", relative_str)) + })?; + } else { + header.set_cksum(); + ar.append(&header, &mut file).chain_err(|| { + internal(format!("could not archive source file `{}`", relative_str)) + })?; + } + } + + if let Some(json) = vcs_info { + let filename: PathBuf = Path::new(VCS_INFO_FILE).into(); + debug_assert!(check_filename(&filename).is_ok()); + let fnd = filename.display(); + config + .shell() + .verbose(|shell| shell.status("Archiving", &fnd))?; + let path = format!( + "{}-{}{}{}", + pkg.name(), + pkg.version(), + path::MAIN_SEPARATOR, + fnd + ); + let mut header = Header::new_ustar(); + header + .set_path(&path) + .chain_err(|| format!("failed to add to archive: `{}`", fnd))?; + let json = format!("{}\n", serde_json::to_string_pretty(json)?); + let mut header = Header::new_ustar(); + header.set_path(&path)?; + header.set_entry_type(EntryType::file()); + header.set_mode(0o644); + header.set_size(json.len() as u64); + header.set_cksum(); + ar.append(&header, json.as_bytes()) + .chain_err(|| internal(format!("could not archive source file `{}`", fnd)))?; + } + + if pkg.include_lockfile() { + let new_lock = build_lock(ws)?; + + config + .shell() + .verbose(|shell| shell.status("Archiving", "Cargo.lock"))?; + let path = format!( + "{}-{}{}Cargo.lock", + pkg.name(), + pkg.version(), + path::MAIN_SEPARATOR + ); + let mut header = Header::new_ustar(); + header.set_path(&path)?; + header.set_entry_type(EntryType::file()); + header.set_mode(0o644); + header.set_size(new_lock.len() as u64); + header.set_cksum(); + ar.append(&header, new_lock.as_bytes()) + .chain_err(|| internal("could not archive source file `Cargo.lock`"))?; + } + + let encoder = ar.into_inner()?; + encoder.finish()?; Ok(()) } -fn run_verify(config: &Config, pkg: &Package, tar: &Path) - -> CargoResult<()> { - try!(config.shell().status("Verifying", pkg)); +/// Generate warnings when packaging Cargo.lock, and the resolve have changed. +fn compare_resolve( + config: &Config, + current_pkg: &Package, + orig_resolve: &Resolve, + new_resolve: &Resolve, +) -> CargoResult<()> { + if config.shell().verbosity() != Verbosity::Verbose { + return Ok(()); + } + let new_set: BTreeSet = new_resolve.iter().collect(); + let orig_set: BTreeSet = orig_resolve.iter().collect(); + let added = new_set.difference(&orig_set); + // Removed entries are ignored, this is used to quickly find hints for why + // an entry changed. 
+ let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect(); + for pkg_id in added { + if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() { + // Skip the package that is being created, since its SourceId + // (directory) changes. + continue; + } + // Check for candidates where the source has changed (such as [patch] + // or a dependency with multiple sources like path/version). + let removed_candidates: Vec<&PackageId> = removed + .iter() + .filter(|orig_pkg_id| { + orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version() + }) + .cloned() + .collect(); + let extra = match removed_candidates.len() { + 0 => { + // This can happen if the original was out of date. + let previous_versions: Vec<&PackageId> = removed + .iter() + .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name()) + .cloned() + .collect(); + match previous_versions.len() { + 0 => String::new(), + 1 => format!( + ", previous version was `{}`", + previous_versions[0].version() + ), + _ => format!( + ", previous versions were: {}", + previous_versions + .iter() + .map(|pkg_id| format!("`{}`", pkg_id.version())) + .collect::>() + .join(", ") + ), + } + } + 1 => { + // This can happen for multi-sourced dependencies like + // `{path="...", version="..."}` or `[patch]` replacement. + // `[replace]` is not captured in Cargo.lock. + format!( + ", was originally sourced from `{}`", + removed_candidates[0].source_id() + ) + } + _ => { + // I don't know if there is a way to actually trigger this, + // but handle it just in case. + let comma_list = removed_candidates + .iter() + .map(|pkg_id| format!("`{}`", pkg_id.source_id())) + .collect::>() + .join(", "); + format!( + ", was originally sourced from one of these sources: {}", + comma_list + ) + } + }; + let msg = format!( + "package `{}` added to the packaged Cargo.lock file{}", + pkg_id, extra + ); + config.shell().status_with_color("Note", msg, Color::Cyan)?; + } + Ok(()) +} + +fn check_yanked(config: &Config, pkg_set: &PackageSet<'_>, resolve: &Resolve) -> CargoResult<()> { + // Checking the yanked status involves taking a look at the registry and + // maybe updating files, so be sure to lock it here. + let _lock = config.acquire_package_cache_lock()?; + + let mut sources = pkg_set.sources_mut(); + for pkg_id in resolve.iter() { + if let Some(source) = sources.get_mut(pkg_id.source_id()) { + if source.is_yanked(pkg_id)? 
{ + config.shell().warn(format!( + "package `{}` in Cargo.lock is yanked in registry `{}`, \ + consider updating to a version that is not yanked", + pkg_id, + pkg_id.source_id().display_registry_name() + ))?; + } + } + } + Ok(()) +} + +fn run_verify(ws: &Workspace<'_>, tar: &FileLock, opts: &PackageOpts<'_>) -> CargoResult<()> { + let config = ws.config(); + let pkg = ws.current()?; - let f = try!(GzDecoder::new(try!(File::open(tar)))); - let dst = pkg.root().join(&format!("target/package/{}-{}", - pkg.name(), pkg.version())); - if fs::metadata(&dst).is_ok() { - try!(fs::remove_dir_all(&dst)); + config.shell().status("Verifying", pkg)?; + + let f = GzDecoder::new(tar.file()); + let dst = tar + .parent() + .join(&format!("{}-{}", pkg.name(), pkg.version())); + if dst.exists() { + paths::remove_dir_all(&dst)?; } let mut archive = Archive::new(f); - try!(archive.unpack(dst.parent().unwrap())); - let manifest_path = dst.join("Cargo.toml"); - - // When packages are uploaded to the registry, all path dependencies are - // implicitly converted to registry-based dependencies, so we rewrite those - // dependencies here. - // - // We also be sure to point all paths at `dst` instead of the previous - // location that the package was original read from. In locking the - // `SourceId` we're telling it that the corresponding `PathSource` will be - // considered updated and won't actually read any packages. - let registry = try!(SourceId::for_central(config)); - let precise = Some("locked".to_string()); - let new_src = try!(SourceId::for_path(&dst)).with_precise(precise); - let new_pkgid = try!(PackageId::new(pkg.name(), pkg.version(), &new_src)); - let new_summary = pkg.summary().clone().map_dependencies(|d| { - if !d.source_id().is_path() { return d } - d.clone_inner().set_source_id(registry.clone()).into_dependency() - }); - let mut new_manifest = pkg.manifest().clone(); - new_manifest.set_summary(new_summary.override_id(new_pkgid)); - let new_pkg = Package::new(new_manifest, &manifest_path); - - // Now that we've rewritten all our path dependencies, compile it! - try!(ops::compile_pkg(&new_pkg, None, &ops::CompileOptions { - config: config, - jobs: None, - target: None, - features: &[], - no_default_features: false, - spec: None, - filter: ops::CompileFilter::Everything, - exec_engine: None, - release: false, - mode: ops::CompileMode::Build, - target_rustc_args: None, - })); + // We don't need to set the Modified Time, as it's not relevant to verification + // and it errors on filesystems that don't support setting a modified timestamp + archive.set_preserve_mtime(false); + archive.unpack(dst.parent().unwrap())?; + + // Manufacture an ephemeral workspace to ensure that even if the top-level + // package has a workspace we can still build our new crate. 
+    let id = SourceId::for_path(&dst)?;
+    let mut src = PathSource::new(&dst, id, ws.config());
+    let new_pkg = src.root_package()?;
+    let pkg_fingerprint = hash_all(&dst)?;
+    let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
+    let rustc_args = if pkg
+        .manifest()
+        .features()
+        .require(Feature::public_dependency())
+        .is_ok()
+    {
+        // FIXME: Turn this on at some point in the future
+        //Some(vec!["-D exported_private_dependencies".to_string()])
+        Some(vec![])
+    } else {
+        None
+    };
+
+    let exec: Arc<dyn Executor> = Arc::new(DefaultExecutor);
+    ops::compile_with_exec(
+        &ws,
+        &ops::CompileOptions {
+            config,
+            build_config: BuildConfig::new(config, opts.jobs, &opts.target, CompileMode::Build)?,
+            features: opts.features.clone(),
+            no_default_features: opts.no_default_features,
+            all_features: opts.all_features,
+            spec: ops::Packages::Packages(Vec::new()),
+            filter: ops::CompileFilter::Default {
+                required_features_filterable: true,
+            },
+            target_rustdoc_args: None,
+            target_rustc_args: rustc_args,
+            local_rustdoc_args: None,
+            export_dir: None,
+        },
+        &exec,
+    )?;
+
+    // Check that `build.rs` didn't modify any files in the `src` directory.
+    let ws_fingerprint = hash_all(&dst)?;
+    if pkg_fingerprint != ws_fingerprint {
+        let changes = report_hash_difference(&pkg_fingerprint, &ws_fingerprint);
+        failure::bail!(
+            "Source directory was modified by build.rs during cargo publish. \
+             Build scripts should not modify anything outside of OUT_DIR.\n\
+             {}\n\n\
+             To proceed despite this, pass the `--no-verify` flag.",
+            changes
+        )
+    }
+
+    Ok(())
+}
+
+fn hash_all(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
+    fn wrap(path: &Path) -> CargoResult<HashMap<PathBuf, u64>> {
+        let mut result = HashMap::new();
+        let walker = walkdir::WalkDir::new(path).into_iter();
+        for entry in walker.filter_entry(|e| !(e.depth() == 1 && e.file_name() == "target")) {
+            let entry = entry?;
+            let file_type = entry.file_type();
+            if file_type.is_file() {
+                let contents = fs::read(entry.path())?;
+                let hash = util::hex::hash_u64(&contents);
+                result.insert(entry.path().to_path_buf(), hash);
+            } else if file_type.is_symlink() {
+                let hash = util::hex::hash_u64(&fs::read_link(entry.path())?);
+                result.insert(entry.path().to_path_buf(), hash);
+            } else if file_type.is_dir() {
+                let hash = util::hex::hash_u64(&());
+                result.insert(entry.path().to_path_buf(), hash);
+            }
+        }
+        Ok(result)
+    }
+    let result = wrap(path).chain_err(|| format!("failed to verify output at {:?}", path))?;
+    Ok(result)
+}
+
+fn report_hash_difference(
+    orig: &HashMap<PathBuf, u64>,
+    after: &HashMap<PathBuf, u64>,
+) -> String {
+    let mut changed = Vec::new();
+    let mut removed = Vec::new();
+    for (key, value) in orig {
+        match after.get(key) {
+            Some(after_value) => {
+                if value != after_value {
+                    changed.push(key.to_string_lossy());
+                }
+            }
+            None => removed.push(key.to_string_lossy()),
+        }
+    }
+    let mut added: Vec<_> = after
+        .keys()
+        .filter(|key| !orig.contains_key(*key))
+        .map(|key| key.to_string_lossy())
+        .collect();
+    let mut result = Vec::new();
+    if !changed.is_empty() {
+        changed.sort_unstable();
+        result.push(format!("Changed: {}", changed.join("\n\t")));
+    }
+    if !added.is_empty() {
+        added.sort_unstable();
+        result.push(format!("Added: {}", added.join("\n\t")));
+    }
+    if !removed.is_empty() {
+        removed.sort_unstable();
+        result.push(format!("Removed: {}", removed.join("\n\t")));
+    }
+    assert!(!result.is_empty(), "unexpected empty change detection");
+    result.join("\n")
+}
+
+// It can often be the case that files of a particular name on one platform
+// can't actually be created on another platform.
For example files with colons +// in the name are allowed on Unix but not on Windows. +// +// To help out in situations like this, issue about weird filenames when +// packaging as a "heads up" that something may not work on other platforms. +fn check_filename(file: &Path) -> CargoResult<()> { + let name = match file.file_name() { + Some(name) => name, + None => return Ok(()), + }; + let name = match name.to_str() { + Some(name) => name, + None => failure::bail!( + "path does not have a unicode filename which may not unpack \ + on all platforms: {}", + file.display() + ), + }; + let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*']; + if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) { + failure::bail!( + "cannot package a filename with a special character `{}`: {}", + c, + file.display() + ) + } Ok(()) } diff --git a/src/cargo/ops/cargo_pkgid.rs b/src/cargo/ops/cargo_pkgid.rs index d900510b380..56757bd58b8 100644 --- a/src/cargo/ops/cargo_pkgid.rs +++ b/src/cargo/ops/cargo_pkgid.rs @@ -1,28 +1,16 @@ -use std::path::Path; +use crate::core::{PackageIdSpec, Workspace}; +use crate::ops; +use crate::util::CargoResult; -use ops; -use core::{Source, PackageIdSpec}; -use sources::{PathSource}; -use util::{CargoResult, human, Config}; - -pub fn pkgid(manifest_path: &Path, - spec: Option<&str>, - config: &Config) -> CargoResult { - let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(), - config)); - try!(source.update()); - let package = try!(source.root_package()); - - let lockfile = package.root().join("Cargo.lock"); - let source_id = package.package_id().source_id(); - let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) { +pub fn pkgid(ws: &Workspace<'_>, spec: Option<&str>) -> CargoResult { + let resolve = match ops::load_pkg_lockfile(ws)? 
{ Some(resolve) => resolve, - None => return Err(human("A Cargo.lock must exist for this command")) + None => failure::bail!("a Cargo.lock must exist for this command"), }; let pkgid = match spec { - Some(spec) => try!(resolve.query(spec)), - None => package.package_id(), + Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?, + None => ws.current()?.package_id(), }; Ok(PackageIdSpec::from_package_id(pkgid)) } diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs index 123a92039c1..ddd5944339f 100644 --- a/src/cargo/ops/cargo_read_manifest.rs +++ b/src/cargo/ops/cargo_read_manifest.rs @@ -1,98 +1,124 @@ use std::collections::{HashMap, HashSet}; -use std::fs::{self, File}; -use std::io::prelude::*; +use std::fs; use std::io; use std::path::{Path, PathBuf}; -use core::{Package, Manifest, SourceId, PackageId}; -use util::{self, CargoResult, human, Config, ChainError}; -use util::important_paths::find_project_manifest_exact; -use util::toml::{Layout, project_layout}; - -pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId, - config: &Config) - -> CargoResult<(Manifest, Vec)> { - let root = layout.root.clone(); - util::toml::to_manifest(contents, source_id, layout, config).chain_error(|| { - human(format!("failed to parse manifest at `{}`", - root.join("Cargo.toml").display())) - }) -} - -pub fn read_package(path: &Path, source_id: &SourceId, config: &Config) - -> CargoResult<(Package, Vec)> { - trace!("read_package; path={}; source-id={}", path.display(), source_id); - let mut file = try!(File::open(path)); - let mut data = Vec::new(); - try!(file.read_to_end(&mut data)); - - let layout = project_layout(path.parent().unwrap()); - let (manifest, nested) = - try!(read_manifest(&data, layout, source_id, config)); +use log::{info, trace}; + +use crate::core::{EitherManifest, Package, PackageId, SourceId}; +use crate::util::errors::CargoResult; +use crate::util::important_paths::find_project_manifest_exact; +use crate::util::toml::read_manifest; +use crate::util::{self, Config}; + +pub fn read_package( + path: &Path, + source_id: SourceId, + config: &Config, +) -> CargoResult<(Package, Vec)> { + trace!( + "read_package; path={}; source-id={}", + path.display(), + source_id + ); + let (manifest, nested) = read_manifest(path, source_id, config)?; + let manifest = match manifest { + EitherManifest::Real(manifest) => manifest, + EitherManifest::Virtual(..) 
 => failure::bail!(
+            "found a virtual manifest at `{}` instead of a package \
+             manifest",
+            path.display()
+        ),
+    };
     Ok((Package::new(manifest, path), nested))
 }

-pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config)
-                     -> CargoResult<Vec<Package>> {
+pub fn read_packages(
+    path: &Path,
+    source_id: SourceId,
+    config: &Config,
+) -> CargoResult<Vec<Package>> {
     let mut all_packages = HashMap::new();
     let mut visited = HashSet::<PathBuf>::new();
+    let mut errors = Vec::<failure::Error>::new();

-    trace!("looking for root package: {}, source_id={}", path.display(), source_id);
+    trace!(
+        "looking for root package: {}, source_id={}",
+        path.display(),
+        source_id
+    );

-    try!(walk(path, &mut |dir| {
+    walk(path, &mut |dir| {
         trace!("looking for child package: {}", dir.display());

-        // Don't recurse into git databases
-        if dir.file_name().and_then(|s| s.to_str()) == Some(".git") {
-            return Ok(false)
-        }
-
-        // Don't automatically discover packages across git submodules
-        if dir != path && fs::metadata(&dir.join(".git")).is_ok() {
-            return Ok(false)
+        // Don't recurse into hidden/dot directories unless we're at the toplevel
+        if dir != path {
+            let name = dir.file_name().and_then(|s| s.to_str());
+            if name.map(|s| s.starts_with('.')) == Some(true) {
+                return Ok(false);
+            }
+
+            // Don't automatically discover packages across git submodules
+            if fs::metadata(&dir.join(".git")).is_ok() {
+                return Ok(false);
+            }
         }

         // Don't ever look at target directories
-        if dir.file_name().and_then(|s| s.to_str()) == Some("target") &&
-           has_manifest(dir.parent().unwrap()) {
-            return Ok(false)
+        if dir.file_name().and_then(|s| s.to_str()) == Some("target")
+            && has_manifest(dir.parent().unwrap())
+        {
+            return Ok(false);
         }

         if has_manifest(dir) {
-            try!(read_nested_packages(dir, &mut all_packages, source_id, config,
-                                      &mut visited));
+            read_nested_packages(
+                dir,
+                &mut all_packages,
+                source_id,
+                config,
+                &mut visited,
+                &mut errors,
+            )?;
         }
         Ok(true)
-    }));
+    })?;

     if all_packages.is_empty() {
-        Err(human(format!("Could not find Cargo.toml in `{}`", path.display())))
+        match errors.pop() {
+            Some(err) => Err(err),
+            None => Err(failure::format_err!(
+                "Could not find Cargo.toml in `{}`",
+                path.display()
+            )),
+        }
     } else {
         Ok(all_packages.into_iter().map(|(_, v)| v).collect())
     }
 }

-fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult<bool>)
-        -> CargoResult<()> {
-    if !try!(callback(path)) {
+fn walk(path: &Path, callback: &mut dyn FnMut(&Path) -> CargoResult<bool>) -> CargoResult<()> {
+    if !callback(path)? {
         trace!("not processing {}", path.display());
-        return Ok(())
+        return Ok(());
     }

     // Ignore any permission denied errors because temporary directories
     // can often have some weird permissions on them.
     let dirs = match fs::read_dir(path) {
         Ok(dirs) => dirs,
-        Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => {
-            return Ok(())
+        Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()),
+        Err(e) => {
+            let cx = format!("failed to read directory `{}`", path.display());
+            let e = failure::Error::from(e);
+            return Err(e.context(cx).into());
         }
-        Err(e) => return Err(From::from(e)),
     };
     for dir in dirs {
-        let dir = try!(dir);
-        if try!(dir.file_type()).is_dir() {
-            try!(walk(&dir.path(), callback));
+        let dir = dir?;
+        if dir.file_type()?.is_dir() {
+            walk(&dir.path(), callback)?;
         }
     }
     Ok(())
@@ -102,22 +128,58 @@ fn has_manifest(path: &Path) -> bool {
     find_project_manifest_exact(path, "Cargo.toml").is_ok()
 }

-fn read_nested_packages(path: &Path,
-                        all_packages: &mut HashMap<PackageId, Package>,
-                        source_id: &SourceId,
-                        config: &Config,
-                        visited: &mut HashSet<PathBuf>) -> CargoResult<()> {
-    if !visited.insert(path.to_path_buf()) { return Ok(()) }
+fn read_nested_packages(
+    path: &Path,
+    all_packages: &mut HashMap<PackageId, Package>,
+    source_id: SourceId,
+    config: &Config,
+    visited: &mut HashSet<PathBuf>,
+    errors: &mut Vec<failure::Error>,
+) -> CargoResult<()> {
+    if !visited.insert(path.to_path_buf()) {
+        return Ok(());
+    }

-    let manifest = try!(find_project_manifest_exact(path, "Cargo.toml"));
+    let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?;
+
+    let (manifest, nested) = match read_manifest(&manifest_path, source_id, config) {
+        Err(err) => {
+            // Ignore malformed manifests found on git repositories.
+            //
+            // A git source tries to find and read all manifests in the
+            // repository, and since folders cannot be excluded from this
+            // search, it is safer to skip malformed manifests than to fail.
+            //
+            // TODO: Add a way to exclude folders?
+            info!(
+                "skipping malformed package found at `{}`",
+                path.to_string_lossy()
+            );
+            errors.push(err.into());
+            return Ok(());
+        }
+        Ok(tuple) => tuple,
+    };

-    let (pkg, nested) = try!(read_package(&manifest, source_id, config));
-    let pkg_id = pkg.package_id().clone();
-    if !all_packages.contains_key(&pkg_id) {
-        all_packages.insert(pkg_id, pkg);
-    } else {
-        info!("skipping nested package `{}` found at `{}`",
-              pkg.name(), path.to_string_lossy());
+    let manifest = match manifest {
+        EitherManifest::Real(manifest) => manifest,
+        EitherManifest::Virtual(..)
 => return Ok(()),
+    };
+    let pkg = Package::new(manifest, &manifest_path);
+
+    let pkg_id = pkg.package_id();
+    use std::collections::hash_map::Entry;
+    match all_packages.entry(pkg_id) {
+        Entry::Vacant(v) => {
+            v.insert(pkg);
+        }
+        Entry::Occupied(_) => {
+            info!(
+                "skipping nested package `{}` found at `{}`",
+                pkg.name(),
+                path.to_string_lossy()
+            );
+        }
     }

     // Registry sources are not allowed to have `path=` dependencies because
@@ -131,8 +193,7 @@ fn read_nested_packages(
     if !source_id.is_registry() {
         for p in nested.iter() {
             let path = util::normalize_path(&path.join(p));
-            try!(read_nested_packages(&path, all_packages, source_id,
-                                      config, visited));
+            read_nested_packages(&path, all_packages, source_id, config, visited, errors)?;
         }
     }
diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs
index 419a7ab155a..f9fb3cd176f 100644
--- a/src/cargo/ops/cargo_run.rs
+++ b/src/cargo/ops/cargo_run.rs
@@ -1,62 +1,96 @@
+use std::ffi::OsString;
+use std::iter;
 use std::path::Path;

-use ops::{self, ExecEngine, CompileFilter};
-use util::{self, CargoResult, human, process, ProcessError};
-use core::source::Source;
-use sources::PathSource;
-
-pub fn run(manifest_path: &Path,
-           options: &ops::CompileOptions,
-           args: &[String]) -> CargoResult<Option<ProcessError>> {
-    let config = options.config;
-    let mut src = try!(PathSource::for_path(&manifest_path.parent().unwrap(),
-                                            config));
-    try!(src.update());
-    let root = try!(src.root_package());
-
-    let mut bins = root.manifest().targets().iter().filter(|a| {
-        !a.is_lib() && !a.is_custom_build() && match options.filter {
-            CompileFilter::Everything => a.is_bin(),
-            CompileFilter::Only { .. } => options.filter.matches(a),
+use crate::core::{TargetKind, Workspace};
+use crate::ops;
+use crate::util::{CargoResult, ProcessError};
+
+pub fn run(
+    ws: &Workspace<'_>,
+    options: &ops::CompileOptions<'_>,
+    args: &[OsString],
+) -> CargoResult<Option<ProcessError>> {
+    let config = ws.config();
+
+    // We compute the `bins` here *just for diagnosis*. The actual set of
+    // packages to be run is determined by the `ops::compile` call below.
+    let packages = options.spec.get_packages(ws)?;
+    let bins: Vec<_> = packages
+        .into_iter()
+        .flat_map(|pkg| {
+            iter::repeat(pkg).zip(pkg.manifest().targets().iter().filter(|target| {
+                !target.is_lib()
+                    && !target.is_custom_build()
+                    && if !options.filter.is_specific() {
+                        target.is_bin()
+                    } else {
+                        options.filter.target_run(target)
+                    }
+            }))
+        })
+        .collect();
+
+    if bins.is_empty() {
+        if !options.filter.is_specific() {
+            failure::bail!("a bin target must be available for `cargo run`")
+        } else {
+            // This will be verified in `cargo_compile`.
         }
-    });
-    if bins.next().is_none() {
-        match options.filter {
-            CompileFilter::Everything => {
-                return Err(human("a bin target must be available for \
-                                  `cargo run`"))
-            }
-            CompileFilter::Only { .. } => {
-                // this will be verified in cargo_compile
-            }
+    }
+
+    if bins.len() == 1 {
+        let target = bins[0].1;
+        if let TargetKind::ExampleLib(..) = target.kind() {
+            failure::bail!(
+                "example target `{}` is a library and cannot be executed",
+                target.name()
+            )
         }
     }
-    if bins.next().is_some() {
-        match options.filter {
-            CompileFilter::Everything => {
-                return Err(human("`cargo run` requires that a project only have \
-                                  one executable; use the `--bin` option to \
-                                  specify which one to run"))
-            }
-            CompileFilter::Only { ..
} => { - return Err(human("`cargo run` can run at most one executable, \ - but multiple were specified")) - } + + if bins.len() > 1 { + if !options.filter.is_specific() { + let names: Vec<&str> = bins + .into_iter() + .map(|(_pkg, target)| target.name()) + .collect(); + failure::bail!( + "`cargo run` could not determine which binary to run. \ + Use the `--bin` option to specify a binary, \ + or the `default-run` manifest key.\n\ + available binaries: {}", + names.join(", ") + ) + } else { + failure::bail!( + "`cargo run` can run at most one executable, but \ + multiple were specified" + ) } } - let compile = try!(ops::compile(manifest_path, options)); + let compile = ops::compile(ws, options)?; + assert_eq!(compile.binaries.len(), 1); let exe = &compile.binaries[0]; - let exe = match util::without_prefix(&exe, config.cwd()) { - Some(path) if path.file_name() == Some(path.as_os_str()) - => Path::new(".").join(path).to_path_buf(), - Some(path) => path.to_path_buf(), - None => exe.to_path_buf(), + let exe = match exe.strip_prefix(config.cwd()) { + Ok(path) if path.file_name() == Some(path.as_os_str()) => Path::new(".").join(path), + Ok(path) => path.to_path_buf(), + Err(_) => exe.to_path_buf(), }; - let mut process = try!(compile.target_process(exe, &root)) - .into_process_builder(); + let pkg = bins[0].0; + let mut process = compile.target_process(exe, pkg)?; process.args(args).cwd(config.cwd()); - try!(config.shell().status("Running", process.to_string())); - Ok(process.exec().err()) + config.shell().status("Running", process.to_string())?; + + let result = process.exec_replace(); + + match result { + Ok(()) => Ok(None), + Err(e) => { + let err = e.downcast::()?; + Ok(Some(err)) + } + } } diff --git a/src/cargo/ops/cargo_rustc/compilation.rs b/src/cargo/ops/cargo_rustc/compilation.rs deleted file mode 100644 index bd6be3c4c84..00000000000 --- a/src/cargo/ops/cargo_rustc/compilation.rs +++ /dev/null @@ -1,134 +0,0 @@ -use std::collections::{HashMap, HashSet}; -use std::ffi::OsStr; -use std::path::PathBuf; -use semver::Version; - -use core::{PackageId, Package, Target}; -use util::{self, CargoResult, Config}; - -use super::{CommandType, CommandPrototype}; - -/// A structure returning the result of a compilation. -pub struct Compilation<'cfg> { - /// All libraries which were built for a package. - /// - /// This is currently used for passing --extern flags to rustdoc tests later - /// on. - pub libraries: HashMap>, - - /// An array of all tests created during this compilation. - pub tests: Vec<(String, PathBuf)>, - - /// An array of all binaries created. - pub binaries: Vec, - - /// All directires for the output of native build commands. - /// - /// This is currently used to drive some entries which are added to the - /// LD_LIBRARY_PATH as appropriate. - // TODO: deprecated, remove - pub native_dirs: HashMap, - - /// Root output directory (for the local package's artifacts) - pub root_output: PathBuf, - - /// Output directory for rust dependencies - pub deps_output: PathBuf, - - /// Extra environment variables that were passed to compilations and should - /// be passed to future invocations of programs. - pub extra_env: HashMap, - - /// Top-level package that was compiled - pub package: Package, - - /// Features enabled during this compilation. 
- pub features: HashSet, - - config: &'cfg Config, -} - -impl<'cfg> Compilation<'cfg> { - pub fn new(pkg: &Package, config: &'cfg Config) -> Compilation<'cfg> { - Compilation { - libraries: HashMap::new(), - native_dirs: HashMap::new(), // TODO: deprecated, remove - root_output: PathBuf::from("/"), - deps_output: PathBuf::from("/"), - tests: Vec::new(), - binaries: Vec::new(), - extra_env: HashMap::new(), - package: pkg.clone(), - features: HashSet::new(), - config: config, - } - } - - /// See `process`. - pub fn rustc_process(&self, pkg: &Package) -> CargoResult { - self.process(CommandType::Rustc, pkg) - } - - /// See `process`. - pub fn rustdoc_process(&self, pkg: &Package) -> CargoResult { - self.process(CommandType::Rustdoc, pkg) - } - - /// See `process`. - pub fn target_process>(&self, cmd: T, pkg: &Package) - -> CargoResult { - self.process(CommandType::Target(cmd.as_ref().to_os_string()), pkg) - } - - /// See `process`. - pub fn host_process>(&self, cmd: T, pkg: &Package) - -> CargoResult { - self.process(CommandType::Host(cmd.as_ref().to_os_string()), pkg) - } - - /// Prepares a new process with an appropriate environment to run against - /// the artifacts produced by the build process. - /// - /// The package argument is also used to configure environment variables as - /// well as the working directory of the child process. - pub fn process(&self, cmd: CommandType, pkg: &Package) - -> CargoResult { - let mut search_path = util::dylib_path(); - for dir in self.native_dirs.values() { - search_path.push(dir.clone()); - } - search_path.push(self.root_output.clone()); - search_path.push(self.deps_output.clone()); - let search_path = try!(util::join_paths(&search_path, - util::dylib_path_envvar())); - let mut cmd = try!(CommandPrototype::new(cmd, self.config)); - cmd.env(util::dylib_path_envvar(), &search_path); - for (k, v) in self.extra_env.iter() { - cmd.env(k, v); - } - - cmd.env("CARGO_MANIFEST_DIR", pkg.root()) - .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) - .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) - .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) - .env("CARGO_PKG_VERSION_PRE", &pre_version_component(pkg.version())) - .env("CARGO_PKG_VERSION", &pkg.version().to_string()) - .cwd(pkg.root()); - Ok(cmd) - } -} - -fn pre_version_component(v: &Version) -> String { - if v.pre.is_empty() { - return String::new(); - } - - let mut ret = String::new(); - - for (i, x) in v.pre.iter().enumerate() { - if i != 0 { ret.push('.') }; - ret.push_str(&x.to_string()); - } - - ret -} diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs deleted file mode 100644 index d9565294d8a..00000000000 --- a/src/cargo/ops/cargo_rustc/context.rs +++ /dev/null @@ -1,525 +0,0 @@ -use std::collections::hash_map::Entry::{Occupied, Vacant}; -use std::collections::{HashSet, HashMap}; -use std::path::{Path, PathBuf}; -use std::str; -use std::sync::Arc; - -use regex::Regex; - -use core::{SourceMap, Package, PackageId, PackageSet, Resolve, Target, Profile}; -use core::{TargetKind, LibKind, Profiles, Metadata}; -use util::{self, CargoResult, ChainError, internal, Config, profile}; -use util::human; - -use super::TargetConfig; -use super::custom_build::BuildState; -use super::fingerprint::Fingerprint; -use super::layout::{Layout, LayoutProxy}; -use super::{Kind, Compilation, BuildConfig}; -use super::{ProcessEngine, ExecEngine}; - -#[derive(Debug, Clone, Copy)] -pub enum Platform { - Target, - Plugin, - PluginAndTarget, 
-} - -pub struct Context<'a, 'cfg: 'a> { - pub config: &'cfg Config, - pub resolve: &'a Resolve, - pub sources: &'a SourceMap<'cfg>, - pub compilation: Compilation<'cfg>, - pub build_state: Arc, - pub exec_engine: Arc>, - pub fingerprints: HashMap<(&'a PackageId, &'a Target, &'a Profile, Kind), - Fingerprint>, - pub compiled: HashSet<(&'a PackageId, &'a Target, &'a Profile)>, - pub build_config: BuildConfig, - pub build_scripts: HashMap<(&'a PackageId, &'a Target, &'a Profile, Kind), - Vec<&'a PackageId>>, - - host: Layout, - target: Option, - target_triple: String, - host_dylib: Option<(String, String)>, - host_exe: String, - package_set: &'a PackageSet, - target_dylib: Option<(String, String)>, - target_exe: String, - requirements: HashMap<(&'a PackageId, &'a str), Platform>, - profiles: &'a Profiles, -} - -impl<'a, 'cfg> Context<'a, 'cfg> { - pub fn new(resolve: &'a Resolve, - sources: &'a SourceMap<'cfg>, - deps: &'a PackageSet, - config: &'cfg Config, - host: Layout, - target_layout: Option, - root_pkg: &Package, - build_config: BuildConfig, - profiles: &'a Profiles) -> CargoResult> { - let target = build_config.requested_target.clone(); - let target = target.as_ref().map(|s| &s[..]); - let (target_dylib, target_exe) = try!(Context::filename_parts(target, - config)); - let (host_dylib, host_exe) = if build_config.requested_target.is_none() { - (target_dylib.clone(), target_exe.clone()) - } else { - try!(Context::filename_parts(None, config)) - }; - let target_triple = target.unwrap_or_else(|| { - &config.rustc_info().host[..] - }).to_string(); - let engine = build_config.exec_engine.as_ref().cloned().unwrap_or({ - Arc::new(Box::new(ProcessEngine)) - }); - Ok(Context { - target_triple: target_triple, - host: host, - target: target_layout, - resolve: resolve, - sources: sources, - package_set: deps, - config: config, - target_dylib: target_dylib, - target_exe: target_exe, - host_dylib: host_dylib, - host_exe: host_exe, - requirements: HashMap::new(), - compilation: Compilation::new(root_pkg, config), - build_state: Arc::new(BuildState::new(&build_config, deps)), - build_config: build_config, - exec_engine: engine, - fingerprints: HashMap::new(), - profiles: profiles, - compiled: HashSet::new(), - build_scripts: HashMap::new(), - }) - } - - /// Run `rustc` to discover the dylib prefix/suffix for the target - /// specified as well as the exe suffix - fn filename_parts(target: Option<&str>, cfg: &Config) - -> CargoResult<(Option<(String, String)>, String)> { - let mut process = try!(util::process(cfg.rustc())); - process.arg("-") - .arg("--crate-name").arg("_") - .arg("--crate-type").arg("dylib") - .arg("--crate-type").arg("bin") - .arg("--print=file-names") - .env_remove("RUST_LOG"); - if let Some(s) = target { - process.arg("--target").arg(s); - }; - let output = try!(process.exec_with_output()); - - let error = str::from_utf8(&output.stderr).unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - let mut lines = output.lines(); - let nodylib = Regex::new("unsupported crate type.*dylib").unwrap(); - let nobin = Regex::new("unsupported crate type.*bin").unwrap(); - let dylib = if nodylib.is_match(error) { - None - } else { - let dylib_parts: Vec<&str> = lines.next().unwrap().trim() - .split('_').collect(); - assert!(dylib_parts.len() == 2, - "rustc --print-file-name output has changed"); - Some((dylib_parts[0].to_string(), dylib_parts[1].to_string())) - }; - - let exe_suffix = if nobin.is_match(error) { - String::new() - } else { - lines.next().unwrap().trim() - 
.split('_').skip(1).next().unwrap().to_string() - }; - Ok((dylib, exe_suffix.to_string())) - } - - /// Prepare this context, ensuring that all filesystem directories are in - /// place. - pub fn prepare(&mut self, pkg: &'a Package, - targets: &[(&'a Target, &'a Profile)]) - -> CargoResult<()> { - let _p = profile::start("preparing layout"); - - try!(self.host.prepare().chain_error(|| { - internal(format!("couldn't prepare build directories for `{}`", - pkg.name())) - })); - match self.target { - Some(ref mut target) => { - try!(target.prepare().chain_error(|| { - internal(format!("couldn't prepare build directories \ - for `{}`", pkg.name())) - })); - } - None => {} - } - - for &(target, profile) in targets { - self.build_requirements(pkg, target, profile, Kind::from(target)); - } - - let jobs = self.jobs(); - self.compilation.extra_env.insert("NUM_JOBS".to_string(), - jobs.to_string()); - self.compilation.root_output = - self.layout(pkg, Kind::Target).proxy().dest().to_path_buf(); - self.compilation.deps_output = - self.layout(pkg, Kind::Target).proxy().deps().to_path_buf(); - - return Ok(()); - } - - fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target, - profile: &Profile, kind: Kind) { - let req = if kind == Kind::Host { Platform::Plugin } else { Platform::Target }; - - match self.requirements.entry((pkg.package_id(), target.name())) { - Occupied(mut entry) => match (*entry.get(), req) { - (Platform::Plugin, Platform::Plugin) | - (Platform::PluginAndTarget, Platform::Plugin) | - (Platform::Target, Platform::Target) | - (Platform::PluginAndTarget, Platform::Target) | - (Platform::PluginAndTarget, Platform::PluginAndTarget) => return, - _ => *entry.get_mut() = entry.get().combine(req), - }, - Vacant(entry) => { entry.insert(req); } - }; - - for (pkg, dep, profile) in self.dep_targets(pkg, target, kind, profile) { - self.build_requirements(pkg, dep, profile, kind.for_target(dep)); - } - - match pkg.targets().iter().find(|t| t.is_custom_build()) { - Some(custom_build) => { - let profile = self.build_script_profile(pkg.package_id()); - self.build_requirements(pkg, custom_build, profile, Kind::Host); - } - None => {} - } - } - - pub fn get_requirement(&self, pkg: &'a Package, - target: &'a Target) -> Platform { - let default = if target.for_host() { - Platform::Plugin - } else { - Platform::Target - }; - self.requirements.get(&(pkg.package_id(), target.name())) - .map(|a| *a).unwrap_or(default) - } - - /// Returns the appropriate directory layout for either a plugin or not. - pub fn layout(&self, pkg: &Package, kind: Kind) -> LayoutProxy { - let primary = pkg.package_id() == self.resolve.root(); - match kind { - Kind::Host => LayoutProxy::new(&self.host, primary), - Kind::Target => LayoutProxy::new(self.target.as_ref() - .unwrap_or(&self.host), - primary), - } - } - - /// Returns the appropriate output directory for the specified package and - /// target. - pub fn out_dir(&self, pkg: &Package, kind: Kind, target: &Target) -> PathBuf { - let out_dir = self.layout(pkg, kind); - if target.is_custom_build() { - out_dir.build(pkg) - } else if target.is_example() { - out_dir.examples().to_path_buf() - } else { - out_dir.root().to_path_buf() - } - } - - /// Return the (prefix, suffix) pair for dynamic libraries. - /// - /// If `plugin` is true, the pair corresponds to the host platform, - /// otherwise it corresponds to the target platform. 
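The prefix/suffix pairs consulted by `dylib()` just below come out of the `filename_parts` probe deleted above, and that probe is easy to reproduce standalone. A minimal sketch, assuming only that `rustc` is on `PATH`: compile an empty crate named `_` as both a dylib and a bin, and whatever surrounds the `_` in the reported file names is the platform's prefix/suffix pair.

```rust
use std::process::Command;

fn main() {
    // Ask rustc to name the artifacts for an empty crate called `_`, built
    // both as a dylib and as an executable.
    let output = Command::new("rustc")
        .args(&["-", "--crate-name", "_", "--crate-type", "dylib",
                "--crate-type", "bin", "--print=file-names"])
        .output()
        .expect("failed to spawn rustc");

    // On Linux this prints "lib_.so" and "_": a ("lib", ".so") dylib pair and
    // an empty executable suffix.
    for line in String::from_utf8_lossy(&output.stdout).lines() {
        if let Some(idx) = line.find('_') {
            println!("prefix = {:?}, suffix = {:?}", &line[..idx], &line[idx + 1..]);
        }
    }
}
```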
- fn dylib(&self, kind: Kind) -> CargoResult<(&str, &str)> { - let (triple, pair) = if kind == Kind::Host { - (&self.config.rustc_info().host, &self.host_dylib) - } else { - (&self.target_triple, &self.target_dylib) - }; - match *pair { - None => return Err(human(format!("dylib outputs are not supported \ - for {}", triple))), - Some((ref s1, ref s2)) => Ok((s1, s2)), - } - } - - /// Return the target triple which this context is targeting. - pub fn target_triple(&self) -> &str { - &self.target_triple - } - - /// Get the metadata for a target in a specific profile - pub fn target_metadata(&self, pkg: &Package, target: &Target, - profile: &Profile) -> Option { - let metadata = target.metadata(); - if target.is_lib() && profile.test { - // Libs and their tests are built in parallel, so we need to make - // sure that their metadata is different. - metadata.map(|m| m.clone()).map(|mut m| { - m.mix(&"test"); - m - }) - } else if target.is_bin() && profile.test { - // Make sure that the name of this test executable doesn't - // conflict with a library that has the same name and is - // being tested - let mut metadata = pkg.generate_metadata(); - metadata.mix(&format!("bin-{}", target.name())); - Some(metadata) - } else if pkg.package_id() == self.resolve.root() && !profile.test { - // If we're not building a unit test then the root package never - // needs any metadata as it's guaranteed to not conflict with any - // other output filenames. This means that we'll have predictable - // file names like `target/debug/libfoo.{a,so,rlib}` and such. - None - } else { - metadata.map(|m| m.clone()) - } - } - - /// Returns the file stem for a given target/profile combo - pub fn file_stem(&self, pkg: &Package, target: &Target, - profile: &Profile) -> String { - match self.target_metadata(pkg, target, profile) { - Some(ref metadata) => format!("{}{}", target.crate_name(), - metadata.extra_filename), - None if target.allows_underscores() => target.name().to_string(), - None => target.crate_name().to_string(), - } - } - - /// Return the filenames that the given target for the given profile will - /// generate. - pub fn target_filenames(&self, pkg: &Package, target: &Target, - profile: &Profile, kind: Kind) - -> CargoResult> { - let stem = self.file_stem(pkg, target, profile); - let suffix = if target.for_host() {&self.host_exe} else {&self.target_exe}; - - let mut ret = Vec::new(); - match *target.kind() { - TargetKind::Example | TargetKind::Bin | TargetKind::CustomBuild | - TargetKind::Bench | TargetKind::Test => { - ret.push(format!("{}{}", stem, suffix)); - } - TargetKind::Lib(..) if profile.test => { - ret.push(format!("{}{}", stem, suffix)); - } - TargetKind::Lib(ref libs) => { - for lib in libs.iter() { - match *lib { - LibKind::Dylib => { - let (prefix, suffix) = try!(self.dylib(kind)); - ret.push(format!("{}{}{}", prefix, stem, suffix)); - } - LibKind::Lib | - LibKind::Rlib => ret.push(format!("lib{}.rlib", stem)), - LibKind::StaticLib => ret.push(format!("lib{}.a", stem)), - } - } - } - } - assert!(ret.len() > 0); - return Ok(ret); - } - - /// For a package, return all targets which are registered as dependencies - /// for that package. 
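Before `dep_targets` below, the naming rules that `target_filenames` above implements can be made concrete. A self-contained sketch (the `LibKind` enum is redeclared locally; this is not Cargo's API) of how a file stem turns into on-disk names:

```rust
// Not Cargo's API: `LibKind` is redeclared so the sketch compiles on its own.
enum LibKind { Rlib, StaticLib, Dylib }

fn filename(stem: &str, kind: &LibKind, dylib: (&str, &str)) -> String {
    match *kind {
        // rlibs and staticlibs always use a `lib` prefix and a fixed suffix...
        LibKind::Rlib => format!("lib{}.rlib", stem),
        LibKind::StaticLib => format!("lib{}.a", stem),
        // ...while dylibs use the platform pair probed from rustc, e.g.
        // ("lib", ".so") on Linux or ("", ".dll") on Windows.
        LibKind::Dylib => format!("{}{}{}", dylib.0, stem, dylib.1),
    }
}

fn main() {
    assert_eq!(filename("foo", &LibKind::Rlib, ("lib", ".so")), "libfoo.rlib");
    assert_eq!(filename("foo", &LibKind::Dylib, ("lib", ".so")), "libfoo.so");
}
```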
- pub fn dep_targets(&self, pkg: &Package, target: &Target, kind: Kind, - profile: &Profile) - -> Vec<(&'a Package, &'a Target, &'a Profile)> { - if profile.doc { - return self.doc_deps(pkg, target); - } - let deps = match self.resolve.deps(pkg.package_id()) { - None => return Vec::new(), - Some(deps) => deps, - }; - let mut ret = deps.map(|id| self.get_package(id)).filter(|dep| { - pkg.dependencies().iter().filter(|d| { - d.name() == dep.name() - }).any(|d| { - // If this target is a build command, then we only want build - // dependencies, otherwise we want everything *other than* build - // dependencies. - let is_correct_dep = target.is_custom_build() == d.is_build(); - - // If this dependency is *not* a transitive dependency, then it - // only applies to test/example targets - let is_actual_dep = d.is_transitive() || - target.is_test() || - target.is_example() || - profile.test; - - // If this dependency is only available for certain platforms, - // make sure we're only enabling it for that platform. - let is_platform_same = match (d.only_for_platform(), kind) { - (Some(ref platform), Kind::Host) => { - *platform == self.config.rustc_info().host - }, - (Some(ref platform), Kind::Target) => { - *platform == self.target_triple - }, - (None, _) => true - }; - - // If the dependency is optional, then we're only activating it - // if the corresponding feature was activated - let activated = !d.is_optional() || - self.resolve.features(pkg.package_id()).map(|f| { - f.contains(d.name()) - }).unwrap_or(false); - - is_correct_dep && is_actual_dep && is_platform_same && activated - }) - }).filter_map(|pkg| { - pkg.targets().iter().find(|t| t.is_lib()).map(|t| { - (pkg, t, self.lib_profile(pkg.package_id())) - }) - }).collect::>(); - - // If a target isn't actually a build script itself, then it depends on - // the build script if there is one. - if target.is_custom_build() { return ret } - let pkg = self.get_package(pkg.package_id()); - if let Some(t) = pkg.targets().iter().find(|t| t.is_custom_build()) { - ret.push((pkg, t, self.build_script_profile(pkg.package_id()))); - } - - // If this target is a binary, test, example, etc, then it depends on - // the library of the same package. The call to `resolve.deps` above - // didn't include `pkg` in the return values, so we need to special case - // it here and see if we need to push `(pkg, pkg_lib_target)`. - if target.is_lib() { return ret } - if let Some(t) = pkg.targets().iter().find(|t| t.linkable()) { - ret.push((pkg, t, self.lib_profile(pkg.package_id()))); - } - - // Integration tests/benchmarks require binaries to be built - if profile.test && (target.is_test() || target.is_bench()) { - ret.extend(pkg.targets().iter().filter(|t| t.is_bin()) - .map(|t| (pkg, t, self.lib_profile(pkg.package_id())))); - } - return ret - } - - /// Returns the dependencies necessary to document a package - fn doc_deps(&self, pkg: &Package, target: &Target) - -> Vec<(&'a Package, &'a Target, &'a Profile)> { - let pkg = self.get_package(pkg.package_id()); - let deps = self.resolve.deps(pkg.package_id()).into_iter(); - let deps = deps.flat_map(|a| a).map(|id| { - self.get_package(id) - }).filter(|dep| { - pkg.dependencies().iter().find(|d| { - d.name() == dep.name() - }).unwrap().is_transitive() - }).filter_map(|dep| { - dep.targets().iter().find(|t| t.is_lib()).map(|t| (dep, t)) - }); - - // To document a library, we depend on dependencies actually being - // built. 
If we're documenting *all* libraries, then we also depend on - // the documentation of the library being built. - let mut ret = Vec::new(); - for (dep, lib) in deps { - ret.push((dep, lib, self.lib_profile(dep.package_id()))); - if self.build_config.doc_all { - ret.push((dep, lib, &self.profiles.doc)); - } - } - - // Be sure to build/run the build script for documented libraries as - if let Some(t) = pkg.targets().iter().find(|t| t.is_custom_build()) { - ret.push((pkg, t, self.build_script_profile(pkg.package_id()))); - } - - // If we document a binary, we need the library available - if target.is_bin() { - if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { - ret.push((pkg, t, self.lib_profile(pkg.package_id()))); - } - } - return ret - } - - /// Gets a package for the given package id. - pub fn get_package(&self, id: &PackageId) -> &'a Package { - self.package_set.iter() - .find(|pkg| id == pkg.package_id()) - .expect("Should have found package") - } - - /// Get the user-specified linker for a particular host or target - pub fn linker(&self, kind: Kind) -> Option<&Path> { - self.target_config(kind).linker.as_ref().map(|s| s.as_ref()) - } - - /// Get the user-specified `ar` program for a particular host or target - pub fn ar(&self, kind: Kind) -> Option<&Path> { - self.target_config(kind).ar.as_ref().map(|s| s.as_ref()) - } - - /// Get the target configuration for a particular host or target - fn target_config(&self, kind: Kind) -> &TargetConfig { - match kind { - Kind::Host => &self.build_config.host, - Kind::Target => &self.build_config.target, - } - } - - /// Number of jobs specified for this build - pub fn jobs(&self) -> u32 { self.build_config.jobs } - - /// Requested (not actual) target for the build - pub fn requested_target(&self) -> Option<&str> { - self.build_config.requested_target.as_ref().map(|s| &s[..]) - } - - pub fn lib_profile(&self, _pkg: &PackageId) -> &'a Profile { - if self.build_config.release { - &self.profiles.release - } else { - &self.profiles.dev - } - } - - pub fn build_script_profile(&self, _pkg: &PackageId) -> &'a Profile { - // TODO: should build scripts always be built with a dev - // profile? How is this controlled at the CLI layer? - &self.profiles.dev - } -} - -impl Platform { - pub fn combine(self, other: Platform) -> Platform { - match (self, other) { - (Platform::Target, Platform::Target) => Platform::Target, - (Platform::Plugin, Platform::Plugin) => Platform::Plugin, - _ => Platform::PluginAndTarget, - } - } - - pub fn includes(self, kind: Kind) -> bool { - match (self, kind) { - (Platform::PluginAndTarget, _) | - (Platform::Target, Kind::Target) | - (Platform::Plugin, Kind::Host) => true, - _ => false, - } - } -} diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs deleted file mode 100644 index a12f27bfd6f..00000000000 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ /dev/null @@ -1,426 +0,0 @@ -use std::collections::HashMap; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::path::PathBuf; -use std::str; -use std::sync::Mutex; - -use core::{Package, Target, PackageId, PackageSet, Profile}; -use util::{CargoResult, human, Human}; -use util::{internal, ChainError, profile}; - -use super::job::Work; -use super::{fingerprint, process, Kind, Context, Platform}; -use super::CommandType; -use util::Freshness; - -/// Contains the parsed output of a custom build script. 
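Before moving into custom_build.rs: the `Platform` lattice that closed context.rs above is small enough to check exhaustively. A self-contained sketch (both enums redeclared locally) of the `combine`/`includes` behavior:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Kind { Host, Target }

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Platform { Target, Plugin, PluginAndTarget }

impl Platform {
    fn combine(self, other: Platform) -> Platform {
        match (self, other) {
            (Platform::Target, Platform::Target) => Platform::Target,
            (Platform::Plugin, Platform::Plugin) => Platform::Plugin,
            _ => Platform::PluginAndTarget,
        }
    }

    fn includes(self, kind: Kind) -> bool {
        match (self, kind) {
            (Platform::PluginAndTarget, _) |
            (Platform::Target, Kind::Target) |
            (Platform::Plugin, Kind::Host) => true,
            _ => false,
        }
    }
}

fn main() {
    // A target-only requirement merged with a plugin-only one must widen.
    let req = Platform::Target.combine(Platform::Plugin);
    assert_eq!(req, Platform::PluginAndTarget);
    assert!(req.includes(Kind::Host) && req.includes(Kind::Target));
}
```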
-#[derive(Clone, Debug)] -pub struct BuildOutput { - /// Paths to pass to rustc with the `-L` flag - pub library_paths: Vec, - /// Names and link kinds of libraries, suitable for the `-l` flag - pub library_links: Vec, - /// Various `--cfg` flags to pass to the compiler - pub cfgs: Vec, - /// Metadata to pass to the immediate dependencies - pub metadata: Vec<(String, String)>, -} - -pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>; - -pub struct BuildState { - pub outputs: Mutex, -} - -/// Prepares a `Work` that executes the target as a custom build script. -/// -/// The `req` given is the requirement which this run of the build script will -/// prepare work for. If the requirement is specified as both the target and the -/// host platforms it is assumed that the two are equal and the build script is -/// only run once (not twice). -pub fn prepare(pkg: &Package, target: &Target, req: Platform, - cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> { - let _p = profile::start(format!("build script prepare: {}/{}", - pkg, target.name())); - let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, }; - let (script_output, build_output) = { - (cx.layout(pkg, Kind::Host).build(pkg), - cx.layout(pkg, kind).build_out(pkg)) - }; - - // Building the command to execute - let to_exec = script_output.join(target.name()); - - // Start preparing the process to execute, starting out with some - // environment variables. Note that the profile-related environment - // variables are not set with this the build script's profile but rather the - // package's library profile. - let profile = cx.lib_profile(pkg.package_id()); - let to_exec = to_exec.into_os_string(); - let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx)); - p.env("OUT_DIR", &build_output) - .env("CARGO_MANIFEST_DIR", pkg.root()) - .env("NUM_JOBS", &cx.jobs().to_string()) - .env("TARGET", &match kind { - Kind::Host => &cx.config.rustc_info().host[..], - Kind::Target => cx.target_triple(), - }) - .env("DEBUG", &profile.debuginfo.to_string()) - .env("OPT_LEVEL", &profile.opt_level.to_string()) - .env("PROFILE", if cx.build_config.release {"release"} else {"debug"}) - .env("HOST", &cx.config.rustc_info().host); - - // Be sure to pass along all enabled features for this package, this is the - // last piece of statically known information that we have. - match cx.resolve.features(pkg.package_id()) { - Some(features) => { - for feat in features.iter() { - p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); - } - } - None => {} - } - - // Gather the set of native dependencies that this package has along with - // some other variables to close over. - // - // This information will be used at build-time later on to figure out which - // sorts of variables need to be discovered at that time. 
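For reference, here is the other side of the environment prepared above. This is a hypothetical `build.rs`, not part of this diff: it reads the variables Cargo exports and answers with `cargo:` directives on stdout, which `BuildOutput::parse` later in this file consumes.

```rust
use std::env;

fn main() {
    // Cargo sets these for every build script invocation (see the env()
    // calls above); the expects are safe only in that context.
    let out_dir = env::var("OUT_DIR").expect("Cargo always sets OUT_DIR");
    let target = env::var("TARGET").expect("Cargo always sets TARGET");
    eprintln!("building for {} (scratch space: {})", target, out_dir);

    // Enabled features arrive as CARGO_FEATURE_<NAME>=1.
    if env::var_os("CARGO_FEATURE_STATIC").is_some() {
        println!("cargo:rustc-cfg=static_build");
    }

    // Anything printed as `cargo:key=value` is picked up by the parser below.
    println!("cargo:rustc-link-search={}", out_dir);
}
```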
- let lib_deps = { - let not_custom = pkg.targets().iter().find(|t| { - !t.is_custom_build() - }).unwrap(); - cx.dep_targets(pkg, not_custom, kind, profile).iter() - .filter_map(|&(pkg, t, _)| { - if !t.linkable() { return None } - pkg.manifest().links().map(|links| { - (links.to_string(), pkg.package_id().clone()) - }) - }).collect::>() - }; - let pkg_name = pkg.to_string(); - let build_state = cx.build_state.clone(); - let id = pkg.package_id().clone(); - let all = (id.clone(), pkg_name.clone(), build_state.clone(), - build_output.clone()); - let plugin_deps = super::load_build_deps(cx, pkg, target, profile, - Kind::Host); - - try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg))); - try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg))); - - let exec_engine = cx.exec_engine.clone(); - - // Prepare the unit of "dirty work" which will actually run the custom build - // command. - // - // Note that this has to do some extra work just before running the command - // to determine extra environment variables and such. - let work = Work::new(move |desc_tx| { - // Make sure that OUT_DIR exists. - // - // If we have an old build directory, then just move it into place, - // otherwise create it! - if fs::metadata(&build_output).is_err() { - try!(fs::create_dir(&build_output).chain_error(|| { - internal("failed to create script output directory for \ - build command") - })); - } - - // For all our native lib dependencies, pick up their metadata to pass - // along to this custom build command. We're also careful to augment our - // dynamic library search path in case the build script depended on any - // native dynamic libraries. - { - let build_state = build_state.outputs.lock().unwrap(); - for &(ref name, ref id) in lib_deps.iter() { - let data = &build_state[&(id.clone(), kind)].metadata; - for &(ref key, ref value) in data.iter() { - p.env(&format!("DEP_{}_{}", super::envify(name), - super::envify(key)), value); - } - } - try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps)); - } - - // And now finally, run the build command itself! - desc_tx.send(p.to_string()).ok(); - let output = try!(exec_engine.exec_with_output(p).map_err(|mut e| { - e.desc = format!("failed to run custom build command for `{}`\n{}", - pkg_name, e.desc); - Human(e) - })); - - // After the build command has finished running, we need to be sure to - // remember all of its output so we can later discover precisely what it - // was, even if we don't run the build command again (due to freshness). - // - // This is also the location where we provide feedback into the build - // state informing what variables were discovered via our script as - // well. - let output = try!(str::from_utf8(&output.stdout).map_err(|_| { - human("build script output was not valid utf-8") - })); - let parsed_output = try!(BuildOutput::parse(output, &pkg_name)); - build_state.insert(id, req, parsed_output); - - try!(File::create(&build_output.parent().unwrap().join("output")) - .and_then(|mut f| f.write_all(output.as_bytes())) - .map_err(|e| { - human(format!("failed to write output of custom build command: {}", - e)) - })); - - Ok(()) - }); - - // Now that we've prepared our work-to-do, we need to prepare the fresh work - // itself to run when we actually end up just discarding what we calculated - // above. - // - // Note that the freshness calculation here is the build_cmd freshness, not - // target specific freshness. This is because we don't actually know what - // the inputs are to this command! 
- // - // Also note that a fresh build command needs to - let (freshness, dirty, fresh) = - try!(fingerprint::prepare_build_cmd(cx, pkg, kind)); - let dirty = Work::new(move |tx| { - try!(work.call((tx.clone()))); - dirty.call(tx) - }); - let fresh = Work::new(move |tx| { - let (id, pkg_name, build_state, build_output) = all; - let new_loc = build_output.parent().unwrap().join("output"); - let mut f = try!(File::open(&new_loc).map_err(|e| { - human(format!("failed to read cached build command output: {}", e)) - })); - let mut contents = String::new(); - try!(f.read_to_string(&mut contents)); - let output = try!(BuildOutput::parse(&contents, &pkg_name)); - build_state.insert(id, req, output); - - fresh.call(tx) - }); - - Ok((dirty, fresh, freshness)) -} - -impl BuildState { - pub fn new(config: &super::BuildConfig, - packages: &PackageSet) -> BuildState { - let mut sources = HashMap::new(); - for package in packages.iter() { - match package.manifest().links() { - Some(links) => { - sources.insert(links.to_string(), - package.package_id().clone()); - } - None => {} - } - } - let mut outputs = HashMap::new(); - let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host)); - let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target)); - for ((name, output), kind) in i1.chain(i2) { - // If no package is using the library named `name`, then this is - // just an override that we ignore. - if let Some(id) = sources.get(name) { - outputs.insert((id.clone(), kind), output.clone()); - } - } - BuildState { outputs: Mutex::new(outputs) } - } - - fn insert(&self, id: PackageId, req: Platform, - output: BuildOutput) { - let mut outputs = self.outputs.lock().unwrap(); - match req { - Platform::Target => { outputs.insert((id, Kind::Target), output); } - Platform::Plugin => { outputs.insert((id, Kind::Host), output); } - - // If this build output was for both the host and target platforms, - // we need to insert it at both places. - Platform::PluginAndTarget => { - outputs.insert((id.clone(), Kind::Host), output.clone()); - outputs.insert((id, Kind::Target), output); - } - } - } -} - -impl BuildOutput { - // Parses the output of a script. - // The `pkg_name` is used for error messages. 
- pub fn parse(input: &str, pkg_name: &str) -> CargoResult { - let mut library_paths = Vec::new(); - let mut library_links = Vec::new(); - let mut cfgs = Vec::new(); - let mut metadata = Vec::new(); - let whence = format!("build script of `{}`", pkg_name); - - for line in input.lines() { - let mut iter = line.splitn(2, ':'); - if iter.next() != Some("cargo") { - // skip this line since it doesn't start with "cargo:" - continue; - } - let data = match iter.next() { - Some(val) => val, - None => continue - }; - - // getting the `key=value` part of the line - let mut iter = data.splitn(2, '='); - let key = iter.next(); - let value = iter.next(); - let (key, value) = match (key, value) { - (Some(a), Some(b)) => (a, b.trim_right()), - // line started with `cargo:` but didn't match `key=value` - _ => return Err(human(format!("Wrong output in {}: `{}`", - whence, line))) - }; - - match key { - "rustc-flags" => { - let (libs, links) = try!( - BuildOutput::parse_rustc_flags(value, &whence) - ); - library_links.extend(links.into_iter()); - library_paths.extend(libs.into_iter()); - } - "rustc-link-lib" => library_links.push(value.to_string()), - "rustc-link-search" => library_paths.push(PathBuf::from(value)), - "rustc-cfg" => cfgs.push(value.to_string()), - _ => metadata.push((key.to_string(), value.to_string())), - } - } - - Ok(BuildOutput { - library_paths: library_paths, - library_links: library_links, - cfgs: cfgs, - metadata: metadata, - }) - } - - pub fn parse_rustc_flags(value: &str, whence: &str) - -> CargoResult<(Vec, Vec)> { - let value = value.trim(); - let mut flags_iter = value.split(|c: char| c.is_whitespace()) - .filter(|w| w.chars().any(|c| !c.is_whitespace())); - let (mut library_links, mut library_paths) = (Vec::new(), Vec::new()); - loop { - let flag = match flags_iter.next() { - Some(f) => f, - None => break - }; - if flag != "-l" && flag != "-L" { - return Err(human(format!("Only `-l` and `-L` flags are allowed \ - in {}: `{}`", - whence, value))) - } - let value = match flags_iter.next() { - Some(v) => v, - None => return Err(human(format!("Flag in rustc-flags has no \ - value in {}: `{}`", - whence, value))) - }; - match flag { - "-l" => library_links.push(value.to_string()), - "-L" => library_paths.push(PathBuf::from(value)), - - // was already checked above - _ => return Err(human("only -l and -L flags are allowed")) - }; - } - Ok((library_paths, library_links)) - } -} - -/// Compute the `build_scripts` map in the `Context` which tracks what build -/// scripts each package depends on. -/// -/// The global `build_scripts` map lists for all (package, kind) tuples what set -/// of packages' build script outputs must be considered. For example this lists -/// all dependencies' `-L` flags which need to be propagated transitively. -/// -/// The given set of targets to this function is the initial set of -/// targets/profiles which are being built. 
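The grammar accepted by `parse_rustc_flags` above is tiny: alternating `-L <path>`/`-l <name>` pairs, with anything else rejected. A self-contained approximation, for illustration only:

```rust
fn parse_rustc_flags(value: &str) -> Result<(Vec<String>, Vec<String>), String> {
    let (mut paths, mut links) = (Vec::new(), Vec::new());
    let mut words = value.split_whitespace();
    while let Some(flag) = words.next() {
        if flag != "-l" && flag != "-L" {
            return Err(format!("only -l and -L flags are allowed, got `{}`", flag));
        }
        let value = match words.next() {
            Some(v) => v,
            None => return Err(format!("flag `{}` has no value", flag)),
        };
        match flag {
            "-L" => paths.push(value.to_string()),
            _ => links.push(value.to_string()),
        }
    }
    Ok((paths, links))
}

fn main() {
    let (paths, links) = parse_rustc_flags("-L /opt/foo/lib -l foo").unwrap();
    assert_eq!(paths, vec!["/opt/foo/lib".to_string()]);
    assert_eq!(links, vec!["foo".to_string()]);
}
```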
-pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
-                           pkg: &'b Package,
-                           targets: &[(&'b Target, &'b Profile)]) {
-    let mut ret = HashMap::new();
-    for &(target, profile) in targets {
-        build(&mut ret, Kind::Target, pkg, target, profile, cx);
-        build(&mut ret, Kind::Host, pkg, target, profile, cx);
-    }
-
-    // Make the output a little more deterministic by sorting all dependencies
-    for (&(id, target, _, kind), slot) in ret.iter_mut() {
-        slot.sort();
-        slot.dedup();
-        debug!("script deps: {}/{}/{:?} => {:?}", id, target.name(), kind,
-               slot.iter().map(|s| s.to_string()).collect::<Vec<String>>());
-    }
-    cx.build_scripts = ret;
-
-    // Recursive function to build up the map we're constructing. This function
-    // memoizes all of its return values as it goes along.
-    fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<(&'b PackageId, &'b Target,
-                                                 &'b Profile, Kind),
-                                                Vec<&'b PackageId>>,
-                           kind: Kind,
-                           pkg: &'b Package,
-                           target: &'b Target,
-                           profile: &'b Profile,
-                           cx: &Context<'b, 'cfg>)
-                           -> &'a [&'b PackageId] {
-        // If this target has crossed into "host-land" we need to change the
-        // kind that we're compiling for, and otherwise just do a quick
-        // pre-flight check to see if we've already calculated the set of
-        // dependencies.
-        let kind = kind.for_target(target);
-        let id = pkg.package_id();
-        if out.contains_key(&(id, target, profile, kind)) {
-            return &out[&(id, target, profile, kind)]
-        }
-
-        // This loop is both the recursive and additive portion of this
-        // function, the key part of the logic being around determining the
-        // right `kind` to recurse on. If a dependency fits in the kind that
-        // we've got specified, then we just keep blazing a trail, but otherwise
-        // we *switch* the kind we're looking at because it must fit into the
-        // other category.
-        //
-        // We always recurse, but only add to our own array if the target is
-        // linkable to us (e.g. not a binary) and it's for the same original
-        // `kind`.
-        let mut ret = Vec::new();
-        for (pkg, target, p) in cx.dep_targets(pkg, target, kind, profile) {
-            let req = cx.get_requirement(pkg, target);
-
-            let dep_kind = if req.includes(kind) {
-                kind
-            } else if kind == Kind::Target {
-                Kind::Host
-            } else {
-                Kind::Target
-            };
-            let dep_scripts = build(out, dep_kind, pkg, target, p, cx);
-
-            if target.linkable() && kind == dep_kind {
-                if pkg.has_custom_build() {
-                    ret.push(pkg.package_id());
-                }
-                ret.extend(dep_scripts.iter().cloned());
-            }
-        }
-
-        let prev = out.entry((id, target, profile, kind)).or_insert(Vec::new());
-        prev.extend(ret);
-        return prev
-    }
-}
diff --git a/src/cargo/ops/cargo_rustc/engine.rs b/src/cargo/ops/cargo_rustc/engine.rs
deleted file mode 100644
index 41a3188bbf5..00000000000
--- a/src/cargo/ops/cargo_rustc/engine.rs
+++ /dev/null
@@ -1,107 +0,0 @@
-use std::collections::HashMap;
-use std::ffi::{OsStr, OsString};
-use std::fmt;
-use std::path::Path;
-use std::process::Output;
-
-use util::{CargoResult, ProcessError, ProcessBuilder, process};
-use util::Config;
-
-/// Trait for objects that can execute commands.
-pub trait ExecEngine: Send + Sync {
-    fn exec(&self, CommandPrototype) -> Result<(), ProcessError>;
-    fn exec_with_output(&self, CommandPrototype) -> Result<Output, ProcessError>;
-}
-
-/// Default implementation of `ExecEngine`.
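Because `ExecEngine` is a trait, callers could substitute their own engine in place of the default `ProcessEngine` below. A sketch of an engine that logs every command before delegating (illustrative only: `CommandPrototype`, `ProcessError`, and `Output` are the types from this file, so this is not standalone):

```rust
struct LoggingEngine;

impl ExecEngine for LoggingEngine {
    fn exec(&self, command: CommandPrototype) -> Result<(), ProcessError> {
        // CommandPrototype implements Display, so this prints the full
        // command line before handing off to the real process builder.
        println!("running: {}", command);
        command.into_process_builder().exec()
    }

    fn exec_with_output(&self, command: CommandPrototype)
                        -> Result<Output, ProcessError> {
        println!("running: {}", command);
        command.into_process_builder().exec_with_output()
    }
}
```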
-#[derive(Clone, Copy)] -pub struct ProcessEngine; - -impl ExecEngine for ProcessEngine { - fn exec(&self, command: CommandPrototype) -> Result<(), ProcessError> { - command.into_process_builder().exec() - } - - fn exec_with_output(&self, command: CommandPrototype) - -> Result { - command.into_process_builder().exec_with_output() - } -} - -/// Prototype for a command that must be executed. -#[derive(Clone)] -pub struct CommandPrototype { - ty: CommandType, - builder: ProcessBuilder, -} - -impl CommandPrototype { - pub fn new(ty: CommandType, config: &Config) - -> CargoResult { - Ok(CommandPrototype { - builder: try!(match ty { - CommandType::Rustc => process(config.rustc()), - CommandType::Rustdoc => process(config.rustdoc()), - CommandType::Target(ref s) | - CommandType::Host(ref s) => process(s), - }), - ty: ty, - }) - } - - pub fn get_type(&self) -> &CommandType { &self.ty } - - pub fn arg>(&mut self, arg: T) -> &mut CommandPrototype { - self.builder.arg(arg); - self - } - - pub fn args>(&mut self, arguments: &[T]) -> &mut CommandPrototype { - self.builder.args(arguments); - self - } - - pub fn cwd>(&mut self, path: T) -> &mut CommandPrototype { - self.builder.cwd(path); - self - } - - pub fn env>(&mut self, key: &str, val: T) - -> &mut CommandPrototype { - self.builder.env(key, val); - self - } - - pub fn get_args(&self) -> &[OsString] { self.builder.get_args() } - pub fn get_cwd(&self) -> &Path { self.builder.get_cwd() } - - pub fn get_env(&self, var: &str) -> Option { - self.builder.get_env(var) - } - - pub fn get_envs(&self) -> &HashMap> { - self.builder.get_envs() - } - - pub fn into_process_builder(self) -> ProcessBuilder { - self.builder - } -} - -impl fmt::Display for CommandPrototype { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.builder.fmt(f) - } -} - -#[derive(Clone, Debug)] -pub enum CommandType { - Rustc, - Rustdoc, - - /// The command is to be executed for the target architecture. - Target(OsString), - - /// The command is to be executed for the host architecture. - Host(OsString), -} diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs deleted file mode 100644 index 4cfd0dcd74e..00000000000 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ /dev/null @@ -1,427 +0,0 @@ -use std::fs::{self, File, OpenOptions}; -use std::io::prelude::*; -use std::io::{BufReader, SeekFrom}; -use std::path::{Path, PathBuf}; -use std::sync::{Arc, Mutex}; - -use filetime::FileTime; - -use core::{Package, Target, Profile}; -use util; -use util::{CargoResult, Fresh, Dirty, Freshness, internal, profile, ChainError}; - -use super::Kind; -use super::job::Work; -use super::context::Context; - -/// A tuple result of the `prepare_foo` functions in this module. -/// -/// The first element of the triple is whether the target in question is -/// currently fresh or not, and the second two elements are work to perform when -/// the target is dirty or fresh, respectively. -/// -/// Both units of work are always generated because a fresh package may still be -/// rebuilt if some upstream dependency changes. -pub type Preparation = (Freshness, Work, Work); - -/// Prepare the necessary work for the fingerprint for a specific target. -/// -/// When dealing with fingerprints, cargo gets to choose what granularity -/// "freshness" is considered at. One option is considering freshness at the -/// package level. This means that if anything in a package changes, the entire -/// package is rebuilt, unconditionally. 
This simplicity comes at a cost, -/// however, in that test-only changes will cause libraries to be rebuilt, which -/// is quite unfortunate! -/// -/// The cost was deemed high enough that fingerprints are now calculated at the -/// layer of a target rather than a package. Each target can then be kept track -/// of separately and only rebuilt as necessary. This requires cargo to -/// understand what the inputs are to a target, so we drive rustc with the -/// --dep-info flag to learn about all input files to a unit of compilation. -/// -/// This function will calculate the fingerprint for a target and prepare the -/// work necessary to either write the fingerprint or copy over all fresh files -/// from the old directories to their new locations. -pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, - pkg: &'a Package, - target: &'a Target, - profile: &'a Profile, - kind: Kind) -> CargoResult { - let _p = profile::start(format!("fingerprint: {} / {}", - pkg.package_id(), target.name())); - let new = dir(cx, pkg, kind); - let loc = new.join(&filename(target, profile)); - - info!("fingerprint at: {}", loc.display()); - - let mut fingerprint = try!(calculate(cx, pkg, target, profile, kind)); - let is_fresh = try!(is_fresh(&loc, &mut fingerprint)); - - let root = cx.out_dir(pkg, kind, target); - let mut missing_outputs = false; - if !profile.doc { - for filename in try!(cx.target_filenames(pkg, target, profile, - kind)).iter() { - missing_outputs |= fs::metadata(root.join(filename)).is_err(); - } - } - - let allow_failure = profile.rustc_args.is_some(); - Ok(prepare(is_fresh && !missing_outputs, allow_failure, loc, fingerprint)) -} - -/// A fingerprint can be considered to be a "short string" representing the -/// state of a world for a package. -/// -/// If a fingerprint ever changes, then the package itself needs to be -/// recompiled. Inputs to the fingerprint include source code modifications, -/// compiler flags, compiler version, etc. This structure is not simply a -/// `String` due to the fact that some fingerprints cannot be calculated lazily. -/// -/// Path sources, for example, use the mtime of the corresponding dep-info file -/// as a fingerprint (all source files must be modified *before* this mtime). -/// This dep-info file is not generated, however, until after the crate is -/// compiled. As a result, this structure can be thought of as a fingerprint -/// to-be. The actual value can be calculated via `resolve()`, but the operation -/// may fail as some files may not have been generated. -/// -/// Note that dependencies are taken into account for fingerprints because rustc -/// requires that whenever an upstream crate is recompiled that all downstream -/// dependants are also recompiled. This is typically tracked through -/// `DependencyQueue`, but it also needs to be retained here because Cargo can -/// be interrupted while executing, losing the state of the `DependencyQueue` -/// graph. 
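A reduced, standard-library-only model of the mtime comparison just described (real Cargo uses the `filetime` crate and dep-info files; the paths here are hypothetical):

```rust
use std::fs;
use std::io;
use std::path::Path;
use std::time::SystemTime;

fn mtime(path: &Path) -> io::Result<SystemTime> {
    fs::metadata(path)?.modified()
}

/// Fresh iff every input is older than the recorded fingerprint.
fn is_fresh(fingerprint: &Path, inputs: &[&Path]) -> io::Result<bool> {
    let stamp = mtime(fingerprint)?;
    for input in inputs {
        if mtime(input)? > stamp {
            return Ok(false); // this input changed after the last build
        }
    }
    Ok(true)
}

fn main() -> io::Result<()> {
    // Hypothetical paths, purely for illustration.
    let fresh = is_fresh(Path::new("target/.fingerprint/foo"),
                         &[Path::new("src/lib.rs")])?;
    println!("fresh: {}", fresh);
    Ok(())
}
```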
-pub type Fingerprint = Arc; -struct FingerprintInner { - extra: String, - deps: Vec, - local: LocalFingerprint, - resolved: Mutex>, -} - -#[derive(Clone)] -enum LocalFingerprint { - Precalculated(String), - MtimeBased(Option, PathBuf), -} - -impl FingerprintInner { - fn resolve(&self, force: bool) -> CargoResult { - if !force { - if let Some(ref s) = *self.resolved.lock().unwrap() { - return Ok(s.clone()) - } - } - let mut deps: Vec<_> = try!(self.deps.iter().map(|s| { - s.resolve(force) - }).collect()); - deps.sort(); - let known = match self.local { - LocalFingerprint::Precalculated(ref s) => s.clone(), - LocalFingerprint::MtimeBased(Some(n), _) if !force => n.to_string(), - LocalFingerprint::MtimeBased(_, ref p) => { - debug!("resolving: {}", p.display()); - let meta = try!(fs::metadata(p)); - FileTime::from_last_modification_time(&meta).to_string() - } - }; - let resolved = util::short_hash(&(&known, &self.extra, &deps)); - debug!("inputs: {} {} {:?} => {}", known, self.extra, deps, resolved); - *self.resolved.lock().unwrap() = Some(resolved.clone()); - Ok(resolved) - } -} - -/// Calculates the fingerprint for a package/target pair. -/// -/// This fingerprint is used by Cargo to learn about when information such as: -/// -/// * A non-path package changes (changes version, changes revision, etc). -/// * Any dependency changes -/// * The compiler changes -/// * The set of features a package is built with changes -/// * The profile a target is compiled with changes (e.g. opt-level changes) -/// -/// Information like file modification time is only calculated for path -/// dependencies and is calculated in `calculate_target_fresh`. -fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, - pkg: &'a Package, - target: &'a Target, - profile: &'a Profile, - kind: Kind) - -> CargoResult { - let key = (pkg.package_id(), target, profile, kind); - match cx.fingerprints.get(&key) { - Some(s) => return Ok(s.clone()), - None => {} - } - - // First, calculate all statically known "salt data" such as the profile - // information (compiler flags), the compiler version, activated features, - // and target configuration. - let features = cx.resolve.features(pkg.package_id()); - let features = features.map(|s| { - let mut v = s.iter().collect::>(); - v.sort(); - v - }); - let extra = util::short_hash(&(&cx.config.rustc_info().verbose_version, - target, &features, profile)); - debug!("extra {:?} {:?} {:?} = {}", target, profile, features, extra); - - // Next, recursively calculate the fingerprint for all of our dependencies. - // - // Skip the fingerprints of build scripts as they may not always be - // available and the dirtiness propagation for modification is tracked - // elsewhere. Also skip fingerprints of binaries because they don't actually - // induce a recompile, they're just dependencies in the sense that they need - // to be built. 
- let deps = try!(cx.dep_targets(pkg, target, kind, profile).into_iter() - .filter(|&(_, t, _)| !t.is_custom_build() && !t.is_bin()) - .map(|(pkg, target, profile)| { - let kind = match kind { - Kind::Host => Kind::Host, - Kind::Target if target.for_host() => Kind::Host, - Kind::Target => Kind::Target, - }; - calculate(cx, pkg, target, profile, kind) - }).collect::>>()); - - // And finally, calculate what our own local fingerprint is - let local = if use_dep_info(pkg, profile) { - let dep_info = dep_info_loc(cx, pkg, target, profile, kind); - let mtime = try!(calculate_target_mtime(&dep_info)); - - // if the mtime listed is not fresh, then remove the `dep_info` file to - // ensure that future calls to `resolve()` won't work. - if mtime.is_none() { - let _ = fs::remove_file(&dep_info); - } - LocalFingerprint::MtimeBased(mtime, dep_info) - } else { - LocalFingerprint::Precalculated(try!(calculate_pkg_fingerprint(cx, pkg))) - }; - let fingerprint = Arc::new(FingerprintInner { - extra: extra, - deps: deps, - local: local, - resolved: Mutex::new(None), - }); - cx.fingerprints.insert(key, fingerprint.clone()); - Ok(fingerprint) -} - - -// We want to use the mtime for files if we're a path source, but if we're a -// git/registry source, then the mtime of files may fluctuate, but they won't -// change so long as the source itself remains constant (which is the -// responsibility of the source) -fn use_dep_info(pkg: &Package, profile: &Profile) -> bool { - let path = pkg.summary().source_id().is_path(); - !profile.doc && path -} - -/// Prepare the necessary work for the fingerprint of a build command. -/// -/// Build commands are located on packages, not on targets. Additionally, we -/// don't have --dep-info to drive calculation of the fingerprint of a build -/// command. This brings up an interesting predicament which gives us a few -/// options to figure out whether a build command is dirty or not: -/// -/// 1. A build command is dirty if *any* file in a package changes. In theory -/// all files are candidate for being used by the build command. -/// 2. A build command is dirty if any file in a *specific directory* changes. -/// This may lose information as it may require files outside of the specific -/// directory. -/// 3. A build command must itself provide a dep-info-like file stating how it -/// should be considered dirty or not. -/// -/// The currently implemented solution is option (1), although it is planned to -/// migrate to option (2) in the near future. 
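Option (1) above can be sketched with nothing but the standard library: fold every file path and mtime under the package root into one hash, so touching any file dirties the build command. Real Cargo delegates this to the package's `Source` fingerprint; this is only an illustration:

```rust
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
use std::io;
use std::path::Path;

fn hash_dir(dir: &Path, hasher: &mut DefaultHasher) -> io::Result<()> {
    // A real implementation would sort entries so the hash is deterministic.
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let path = entry.path();
        if path.is_dir() {
            hash_dir(&path, hasher)?;
        } else {
            // Any file appearing, disappearing, or being touched changes the
            // fingerprint, which is exactly option (1)'s behavior.
            path.hash(hasher);
            entry.metadata()?.modified()?.hash(hasher);
        }
    }
    Ok(())
}

fn pkg_fingerprint(root: &Path) -> io::Result<u64> {
    let mut hasher = DefaultHasher::new();
    hash_dir(root, &mut hasher)?;
    Ok(hasher.finish())
}

fn main() -> io::Result<()> {
    println!("{:x}", pkg_fingerprint(Path::new("."))?);
    Ok(())
}
```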
-pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package, kind: Kind) - -> CargoResult { - let _p = profile::start(format!("fingerprint build cmd: {}", - pkg.package_id())); - let new = dir(cx, pkg, kind); - let loc = new.join("build"); - - info!("fingerprint at: {}", loc.display()); - - let new_fingerprint = try!(calculate_build_cmd_fingerprint(cx, pkg)); - let new_fingerprint = Arc::new(FingerprintInner { - extra: String::new(), - deps: Vec::new(), - local: LocalFingerprint::Precalculated(new_fingerprint), - resolved: Mutex::new(None), - }); - - let is_fresh = try!(is_fresh(&loc, &new_fingerprint)); - - Ok(prepare(is_fresh, false, loc, new_fingerprint)) -} - -/// Prepare work for when a package starts to build -pub fn prepare_init(cx: &mut Context, pkg: &Package, kind: Kind) - -> (Work, Work) { - let new1 = dir(cx, pkg, kind); - let new2 = new1.clone(); - - let work1 = Work::new(move |_| { - if fs::metadata(&new1).is_err() { - try!(fs::create_dir(&new1)); - } - Ok(()) - }); - let work2 = Work::new(move |_| { - if fs::metadata(&new2).is_err() { - try!(fs::create_dir(&new2)); - } - Ok(()) - }); - - (work1, work2) -} - -/// Given the data to build and write a fingerprint, generate some Work -/// instances to actually perform the necessary work. -fn prepare(is_fresh: bool, - allow_failure: bool, - loc: PathBuf, - fingerprint: Fingerprint) -> Preparation { - let write_fingerprint = Work::new(move |_| { - debug!("write fingerprint: {}", loc.display()); - let fingerprint = fingerprint.resolve(true).chain_error(|| { - internal("failed to resolve a pending fingerprint") - }); - let fingerprint = match fingerprint { - Ok(f) => f, - Err(..) if allow_failure => return Ok(()), - Err(e) => return Err(e), - }; - let mut f = try!(File::create(&loc)); - try!(f.write_all(fingerprint.as_bytes())); - Ok(()) - }); - - (if is_fresh {Fresh} else {Dirty}, write_fingerprint, Work::noop()) -} - -/// Return the (old, new) location for fingerprints for a package -pub fn dir(cx: &Context, pkg: &Package, kind: Kind) -> PathBuf { - cx.layout(pkg, kind).proxy().fingerprint(pkg) -} - -/// Returns the (old, new) location for the dep info file of a target. -pub fn dep_info_loc(cx: &Context, pkg: &Package, target: &Target, - profile: &Profile, kind: Kind) -> PathBuf { - dir(cx, pkg, kind).join(&format!("dep-{}", filename(target, profile))) -} - -fn is_fresh(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult { - let mut file = match File::open(loc) { - Ok(file) => file, - Err(..) => return Ok(false), - }; - - let mut old_fingerprint = String::new(); - try!(file.read_to_string(&mut old_fingerprint)); - let new_fingerprint = match new_fingerprint.resolve(false) { - Ok(s) => s, - Err(..) => return Ok(false), - }; - - trace!("old fingerprint: {}", old_fingerprint); - trace!("new fingerprint: {}", new_fingerprint); - - Ok(old_fingerprint == new_fingerprint) -} - -fn calculate_target_mtime(dep_info: &Path) -> CargoResult> { - macro_rules! fs_try { - ($e:expr) => (match $e { Ok(e) => e, Err(..) => return Ok(None) }) - } - let mut f = BufReader::new(fs_try!(File::open(dep_info))); - // see comments in append_current_dir for where this cwd is manifested from. 
- let mut cwd = Vec::new(); - fs_try!(f.read_until(0, &mut cwd)); - let cwd = try!(util::bytes2path(&cwd[..cwd.len()-1])); - let line = match f.lines().next() { - Some(Ok(line)) => line, - _ => return Ok(None), - }; - let meta = try!(fs::metadata(&dep_info)); - let mtime = FileTime::from_last_modification_time(&meta); - let pos = try!(line.find(": ").chain_error(|| { - internal(format!("dep-info not in an understood format: {}", - dep_info.display())) - })); - let deps = &line[pos + 2..]; - - let mut deps = deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty()); - loop { - let mut file = match deps.next() { - Some(s) => s.to_string(), - None => break, - }; - while file.ends_with("\\") { - file.pop(); - file.push(' '); - file.push_str(deps.next().unwrap()) - } - let meta = match fs::metadata(cwd.join(&file)) { - Ok(meta) => meta, - Err(..) => { info!("stale: {} -- missing", file); return Ok(None) } - }; - let file_mtime = FileTime::from_last_modification_time(&meta); - if file_mtime > mtime { - info!("stale: {} -- {} vs {}", file, file_mtime, mtime); - return Ok(None) - } - } - - Ok(Some(mtime)) -} - -fn calculate_build_cmd_fingerprint(cx: &Context, pkg: &Package) - -> CargoResult { - // TODO: this should be scoped to just the `build` directory, not the entire - // package. - calculate_pkg_fingerprint(cx, pkg) -} - -fn calculate_pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult { - let source = cx.sources - .get(pkg.package_id().source_id()) - .expect("BUG: Missing package source"); - - source.fingerprint(pkg) -} - -fn filename(target: &Target, profile: &Profile) -> String { - let kind = if target.is_lib() {"lib"} else {"bin"}; - let flavor = if target.is_test() || profile.test { - "test-" - } else if profile.doc { - "doc-" - } else { - "" - }; - format!("{}{}-{}", flavor, kind, target.name()) -} - -// The dep-info files emitted by the compiler all have their listed paths -// relative to whatever the current directory was at the time that the compiler -// was invoked. As the current directory may change over time, we need to record -// what that directory was at the beginning of the file so we can know about it -// next time. -pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> { - debug!("appending {} <- {}", path.display(), cwd.display()); - let mut f = try!(OpenOptions::new().read(true).write(true).open(path)); - let mut contents = Vec::new(); - try!(f.read_to_end(&mut contents)); - try!(f.seek(SeekFrom::Start(0))); - try!(f.write_all(try!(util::path2bytes(cwd)))); - try!(f.write_all(&[0])); - try!(f.write_all(&contents)); - Ok(()) -} diff --git a/src/cargo/ops/cargo_rustc/job.rs b/src/cargo/ops/cargo_rustc/job.rs deleted file mode 100644 index b3d58e1d628..00000000000 --- a/src/cargo/ops/cargo_rustc/job.rs +++ /dev/null @@ -1,53 +0,0 @@ -use std::sync::mpsc::Sender; - -use util::{CargoResult, Fresh, Dirty, Freshness}; - -pub struct Job { dirty: Work, fresh: Work } - -/// Each proc should send its description before starting. -/// It should send either once or close immediately. 
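The dep-info parsing in `calculate_target_mtime` above has one subtlety worth isolating: rustc writes Makefile-style dependency lists, so a space inside a filename arrives escaped as `\ `. A distilled version of that unescaping loop:

```rust
fn split_deps(line: &str) -> Vec<String> {
    let pos = line.find(": ").expect("dep-info line starts with `target: `");
    let mut out = Vec::new();
    let mut words = line[pos + 2..].split(' ').filter(|s| !s.is_empty());
    while let Some(word) = words.next() {
        let mut file = word.to_string();
        // A trailing backslash means the space was part of the filename, so
        // glue the next word back on.
        while file.ends_with('\\') {
            file.pop();
            file.push(' ');
            file.push_str(words.next().expect("escaped space mid-list"));
        }
        out.push(file);
    }
    out
}

fn main() {
    let deps = split_deps(r"foo: src/main.rs src/has\ space.rs");
    assert_eq!(deps, vec!["src/main.rs".to_string(),
                          "src/has space.rs".to_string()]);
}
```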
-pub struct Work {
-    inner: Box<FnBox<Sender<String>, CargoResult<()>> + Send>,
-}
-
-trait FnBox<A, R> {
-    fn call_box(self: Box<Self>, a: A) -> R;
-}
-
-impl<A, R, F: FnOnce(A) -> R> FnBox<A, R> for F {
-    fn call_box(self: Box<F>, a: A) -> R {
-        (*self)(a)
-    }
-}
-
-impl Work {
-    pub fn new<F>(f: F) -> Work
-        where F: FnOnce(Sender<String>) -> CargoResult<()> + Send + 'static
-    {
-        Work { inner: Box::new(f) }
-    }
-
-    pub fn noop() -> Work {
-        Work::new(|_| Ok(()))
-    }
-
-    pub fn call(self, tx: Sender<String>) -> CargoResult<()> {
-        self.inner.call_box(tx)
-    }
-}
-
-impl Job {
-    /// Create a new job representing a unit of work.
-    pub fn new(dirty: Work, fresh: Work) -> Job {
-        Job { dirty: dirty, fresh: fresh }
-    }
-
-    /// Consumes this job by running it, returning the result of the
-    /// computation.
-    pub fn run(self, fresh: Freshness, tx: Sender<String>) -> CargoResult<()> {
-        match fresh {
-            Fresh => self.fresh.call(tx),
-            Dirty => self.dirty.call(tx),
-        }
-    }
-}
diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs
deleted file mode 100644
index 3f74e85e98f..00000000000
--- a/src/cargo/ops/cargo_rustc/job_queue.rs
+++ /dev/null
@@ -1,317 +0,0 @@
-use std::collections::HashSet;
-use std::collections::hash_map::HashMap;
-use std::sync::mpsc::{channel, Sender, Receiver};
-
-use threadpool::ThreadPool;
-use term::color::YELLOW;
-
-use core::{Package, PackageId, Resolve, PackageSet};
-use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
-use util::{CargoResult, Dependency, profile};
-
-use super::job::Job;
-
-/// A management structure of the entire dependency graph to compile.
-///
-/// This structure is backed by the `DependencyQueue` type and manages the
-/// actual compilation step of each package. Packages enqueue units of work and
-/// then later on the entire graph is processed and compiled.
-pub struct JobQueue<'a> {
-    pool: ThreadPool,
-    queue: DependencyQueue<(&'a PackageId, Stage),
-                           (&'a Package, Vec<(Job, Freshness)>)>,
-    tx: Sender<Message>,
-    rx: Receiver<Message>,
-    resolve: &'a Resolve,
-    packages: &'a PackageSet,
-    active: u32,
-    pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
-    pkgids: HashSet<&'a PackageId>,
-    printed: HashSet<&'a PackageId>,
-}
-
-/// A helper structure for metadata about the state of a building package.
-struct PendingBuild {
-    /// Number of jobs currently active
-    amt: u32,
-    /// Current freshness state of this package. Any dirty target within a
-    /// package will cause the entire package to become dirty.
-    fresh: Freshness,
-}
-
-/// Current stage of compilation for an individual package.
-///
-/// This is the second layer of keys on the dependency queue to track the state
-/// of where a particular package is in the compilation pipeline. Each of these
-/// stages has a network of dependencies among them, outlined by the
-/// `Dependency` implementation found below.
-///
-/// Each build step for a package is registered with one of these stages, and
-/// each stage has a vector of work to perform in parallel.
-#[derive(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Debug, Copy)] -pub enum Stage { - Start, - BuildCustomBuild, - RunCustomBuild, - Libraries, - Binaries, - LibraryTests, - BinaryTests, - End, -} - -type Message = (PackageId, Stage, Freshness, CargoResult<()>); - -impl<'a> JobQueue<'a> { - pub fn new(resolve: &'a Resolve, packages: &'a PackageSet, jobs: u32) - -> JobQueue<'a> { - let (tx, rx) = channel(); - JobQueue { - pool: ThreadPool::new(jobs as usize), - queue: DependencyQueue::new(), - tx: tx, - rx: rx, - resolve: resolve, - packages: packages, - active: 0, - pending: HashMap::new(), - pkgids: HashSet::new(), - printed: HashSet::new(), - } - } - - pub fn queue(&mut self, pkg: &'a Package, stage: Stage) - -> &mut Vec<(Job, Freshness)> { - self.pkgids.insert(pkg.package_id()); - &mut self.queue.queue(&(self.resolve, self.packages), Fresh, - (pkg.package_id(), stage), - (pkg, Vec::new())).1 - } - - /// Execute all jobs necessary to build the dependency graph. - /// - /// This function will spawn off `config.jobs()` workers to build all of the - /// necessary dependencies, in order. Freshness is propagated as far as - /// possible along each dependency chain. - pub fn execute(&mut self, config: &Config) -> CargoResult<()> { - let _p = profile::start("executing the job graph"); - - // Iteratively execute the dependency graph. Each turn of this loop will - // schedule as much work as possible and then wait for one job to finish, - // possibly scheduling more work afterwards. - while self.queue.len() > 0 { - loop { - match self.queue.dequeue() { - Some((fresh, (_, stage), (pkg, jobs))) => { - info!("start: {} {:?}", pkg, stage); - try!(self.run(pkg, stage, fresh, jobs, config)); - } - None => break, - } - } - - // Now that all possible work has been scheduled, wait for a piece - // of work to finish. If any package fails to build then we stop - // scheduling work as quickly as possibly. - let (id, stage, fresh, result) = self.rx.recv().unwrap(); - info!(" end: {} {:?}", id, stage); - let id = *self.pkgids.iter().find(|&k| *k == &id).unwrap(); - self.active -= 1; - match result { - Ok(()) => { - let state = self.pending.get_mut(&(id, stage)).unwrap(); - state.amt -= 1; - state.fresh = state.fresh.combine(fresh); - if state.amt == 0 { - self.queue.finish(&(id, stage), state.fresh); - } - } - Err(e) => { - if self.active > 0 { - try!(config.shell().say( - "Build failed, waiting for other \ - jobs to finish...", YELLOW)); - for _ in self.rx.iter().take(self.active as usize) {} - } - return Err(e) - } - } - } - - trace!("rustc jobs completed"); - - Ok(()) - } - - /// Execute a stage of compilation for a package. - /// - /// The input freshness is from `dequeue()` and indicates the combined - /// freshness of all upstream dependencies. This function will schedule all - /// work in `jobs` to be executed. - fn run(&mut self, pkg: &'a Package, stage: Stage, fresh: Freshness, - jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> { - let njobs = jobs.len(); - let amt = if njobs == 0 {1} else {njobs as u32}; - let id = pkg.package_id().clone(); - - // While the jobs are all running, we maintain some metadata about how - // many are running, the current state of freshness (of all the combined - // jobs), and the stage to pass to finish() later on. 
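Freshness propagation in this queue is a simple monoid: a package ends up fresh only if every job and every upstream dependency was fresh. A toy model of the `combine` behavior used above (enum redeclared locally; this is a sketch, not Cargo's `util::Freshness`):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Freshness { Fresh, Dirty }

impl Freshness {
    fn combine(self, other: Freshness) -> Freshness {
        match self {
            Freshness::Fresh => other,            // fresh survives only if the other is fresh
            Freshness::Dirty => Freshness::Dirty, // dirty absorbs everything
        }
    }
}

fn main() {
    use Freshness::*;
    assert_eq!(Fresh.combine(Fresh), Fresh);
    assert_eq!(Fresh.combine(Dirty), Dirty);
    assert_eq!(Dirty.combine(Fresh), Dirty);
}
```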
- self.active += amt; - self.pending.insert((pkg.package_id(), stage), PendingBuild { - amt: amt, - fresh: fresh, - }); - - let mut total_fresh = fresh; - let mut running = Vec::new(); - debug!("start {:?} at {:?} for {}", total_fresh, stage, pkg); - for (job, job_freshness) in jobs.into_iter() { - debug!("job: {:?} ({:?})", job_freshness, total_fresh); - let fresh = job_freshness.combine(fresh); - total_fresh = total_fresh.combine(fresh); - let my_tx = self.tx.clone(); - let id = id.clone(); - let (desc_tx, desc_rx) = channel(); - self.pool.execute(move|| { - my_tx.send((id, stage, fresh, job.run(fresh, desc_tx))).unwrap(); - }); - // only the first message of each job is processed - match desc_rx.recv() { - Ok(msg) => running.push(msg), - Err(..) => {} - } - } - - // If no work was scheduled, make sure that a message is actually send - // on this channel. - if njobs == 0 { - self.tx.send((id, stage, fresh, Ok(()))).unwrap(); - } - - // Print out some nice progress information - try!(self.note_working_on(config, pkg.package_id(), stage, total_fresh, - running.len())); - for msg in running.iter() { - try!(config.shell().verbose(|c| c.status("Running", msg))); - } - Ok(()) - } - - // This isn't super trivial because we don't want to print loads and - // loads of information to the console, but we also want to produce a - // faithful representation of what's happening. This is somewhat nuanced - // as a package can start compiling *very* early on because of custom - // build commands and such. - // - // In general, we try to print "Compiling" for the first nontrivial task - // run for a package, regardless of when that is. We then don't print - // out any more information for a package after we've printed it once. - fn note_working_on(&mut self, config: &Config, pkg: &'a PackageId, - stage: Stage, fresh: Freshness, cmds_run: usize) - -> CargoResult<()> { - if self.printed.contains(&pkg) { return Ok(()) } - - match fresh { - // Any dirty stage which runs at least one command gets printed as - // being a compiled package - Dirty if cmds_run == 0 => {} - Dirty => { - self.printed.insert(pkg); - try!(config.shell().status("Compiling", pkg)); - } - Fresh if stage == Stage::End => { - self.printed.insert(pkg); - try!(config.shell().verbose(|c| c.status("Fresh", pkg))); - } - Fresh => {} - } - Ok(()) - } -} - -impl<'a> Dependency for (&'a PackageId, Stage) { - type Context = (&'a Resolve, &'a PackageSet); - - fn dependencies(&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet)) - -> Vec<(&'a PackageId, Stage)> { - // This implementation of `Dependency` is the driver for the structure - // of the dependency graph of packages to be built. The "key" here is - // a pair of the package being built and the stage that it's at. - // - // Each stage here lists dependencies on the previous stages except for - // the start state which depends on the ending state of all dependent - // packages (as determined by the resolve context). - let (id, stage) = *self; - let pkg = packages.iter().find(|p| p.package_id() == id).unwrap(); - let deps = resolve.deps(id).into_iter().flat_map(|a| a) - .filter(|dep| *dep != id); - match stage { - Stage::Start => Vec::new(), - - // Building the build command itself starts off pretty easily,we - // just need to depend on all of the library stages of our own build - // dependencies (making them available to us). 
- Stage::BuildCustomBuild => {
- let mut base = vec![(id, Stage::Start)];
- base.extend(deps.filter(|id| {
- pkg.dependencies().iter().any(|d| {
- d.name() == id.name() && d.is_build()
- })
- }).map(|id| (id, Stage::Libraries)));
- base
- }
-
- // When running a custom build command, we need to be sure that our
- // own custom build command is actually built, and then we need to
- // wait for all our dependencies to finish their custom build
- // commands themselves (as they may provide input to us).
- Stage::RunCustomBuild => {
- let mut base = vec![(id, Stage::BuildCustomBuild)];
- base.extend(deps.filter(|id| {
- pkg.dependencies().iter().any(|d| {
- d.name() == id.name() && d.is_transitive()
- })
- }).map(|id| (id, Stage::RunCustomBuild)));
- base
- }
-
- // Building a library depends on our own custom build command plus
- // all our transitive dependencies.
- Stage::Libraries => {
- let mut base = vec![(id, Stage::RunCustomBuild)];
- base.extend(deps.filter(|id| {
- pkg.dependencies().iter().any(|d| {
- d.name() == id.name() && d.is_transitive()
- })
- }).map(|id| (id, Stage::Libraries)));
- base
- }
-
- // Binaries only depend on libraries being available. Note that they
- // do not depend on dev-dependencies.
- Stage::Binaries => vec![(id, Stage::Libraries)],
-
- // Tests depend on all dependencies (including dev-dependencies) in
- // addition to the library stage for this package. Note, however,
- // that library tests only need to depend on the custom build command
- // being run, not the libraries themselves.
- Stage::BinaryTests | Stage::LibraryTests => {
- let mut base = if stage == Stage::BinaryTests {
- vec![(id, Stage::Libraries)]
- } else {
- vec![(id, Stage::RunCustomBuild)]
- };
- base.extend(deps.map(|id| (id, Stage::Libraries)));
- base
- }
-
- // A marker stage to indicate when a package has entirely finished
- // compiling; nothing is actually built as part of this stage.
- Stage::End => {
- vec![(id, Stage::Binaries), (id, Stage::BinaryTests),
- (id, Stage::LibraryTests)]
- }
- }
- }
-}
diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs
deleted file mode 100644
index 5ab7bb5a4fd..00000000000
--- a/src/cargo/ops/cargo_rustc/layout.rs
+++ /dev/null
@@ -1,157 +0,0 @@
-//! Management of the directory layout of a build
-//!
-//! The directory layout is a little tricky at times, hence a separate file to
-//! house this logic. The current layout looks like this:
-//!
-//! ```ignore
-//! # This is the root directory for all output, the top-level package
-//! # places all of its output here.
-//! target/
-//!
-//! # This is the root directory for all output of *dependencies*
-//! deps/
-//!
-//! # Root directory for all compiled examples
-//! examples/
-//!
-//! # This is the location at which the output of all custom build
-//! # commands are rooted
-//! build/
-//!
-//! # Each package gets its own directory where its build script and
-//! # script output are placed
-//! $pkg1/
-//! $pkg2/
-//! $pkg3/
-//!
-//! # Each package directory has an `out` directory where output
-//! # is placed.
-//! out/
-//!
-//! # This is the location at which the output of all old custom build
-//! # commands are rooted
-//! native/
-//!
-//! # Each package gets its own directory for where its output is
-//! # placed. We can't track exactly what's getting put in here, so
-//! # we just assume that all relevant output is in these
-//! # directories.
-//! $pkg1/
-//! $pkg2/
-//! $pkg3/
-//!
-//! 
# Hidden directory that holds all of the fingerprint files for all -//! # packages -//! .fingerprint/ -//! ``` - -use std::fs; -use std::io; -use std::path::{PathBuf, Path}; - -use core::Package; -use util::Config; -use util::hex::short_hash; - -pub struct Layout { - root: PathBuf, - deps: PathBuf, - native: PathBuf, - build: PathBuf, - fingerprint: PathBuf, - examples: PathBuf, -} - -pub struct LayoutProxy<'a> { - root: &'a Layout, - primary: bool, -} - -impl Layout { - pub fn new(config: &Config, pkg: &Package, triple: Option<&str>, - dest: &str) -> Layout { - let mut path = config.target_dir(pkg); - // Flexible target specifications often point at filenames, so interpret - // the target triple as a Path and then just use the file stem as the - // component for the directory name. - if let Some(triple) = triple { - path.push(Path::new(triple).file_stem().unwrap()); - } - path.push(dest); - Layout::at(path) - } - - pub fn at(root: PathBuf) -> Layout { - Layout { - deps: root.join("deps"), - native: root.join("native"), - build: root.join("build"), - fingerprint: root.join(".fingerprint"), - examples: root.join("examples"), - root: root, - } - } - - pub fn prepare(&mut self) -> io::Result<()> { - if fs::metadata(&self.root).is_err() { - try!(fs::create_dir_all(&self.root)); - } - - try!(mkdir(&self.deps)); - try!(mkdir(&self.native)); - try!(mkdir(&self.fingerprint)); - try!(mkdir(&self.examples)); - try!(mkdir(&self.build)); - - return Ok(()); - - fn mkdir(dir: &Path) -> io::Result<()> { - if fs::metadata(&dir).is_err() { - try!(fs::create_dir(dir)); - } - Ok(()) - } - } - - pub fn dest<'a>(&'a self) -> &'a Path { &self.root } - pub fn deps<'a>(&'a self) -> &'a Path { &self.deps } - pub fn examples<'a>(&'a self) -> &'a Path { &self.examples } - - pub fn fingerprint(&self, package: &Package) -> PathBuf { - self.fingerprint.join(&self.pkg_dir(package)) - } - - pub fn build(&self, package: &Package) -> PathBuf { - self.build.join(&self.pkg_dir(package)) - } - - pub fn build_out(&self, package: &Package) -> PathBuf { - self.build(package).join("out") - } - - fn pkg_dir(&self, pkg: &Package) -> String { - format!("{}-{}", pkg.name(), short_hash(pkg)) - } -} - -impl<'a> LayoutProxy<'a> { - pub fn new(root: &'a Layout, primary: bool) -> LayoutProxy<'a> { - LayoutProxy { - root: root, - primary: primary, - } - } - - pub fn root(&self) -> &'a Path { - if self.primary {self.root.dest()} else {self.root.deps()} - } - pub fn deps(&self) -> &'a Path { self.root.deps() } - - pub fn examples(&self) -> &'a Path { self.root.examples() } - - pub fn build(&self, pkg: &Package) -> PathBuf { self.root.build(pkg) } - - pub fn build_out(&self, pkg: &Package) -> PathBuf { self.root.build_out(pkg) } - - pub fn proxy(&self) -> &'a Layout { self.root } -} diff --git a/src/cargo/ops/cargo_rustc/links.rs b/src/cargo/ops/cargo_rustc/links.rs deleted file mode 100644 index 8f152d76c45..00000000000 --- a/src/cargo/ops/cargo_rustc/links.rs +++ /dev/null @@ -1,35 +0,0 @@ -use std::collections::HashMap; - -use core::PackageSet; -use util::{CargoResult, human}; - -// Validate that there are no duplicated native libraries among packages and -// that all packages with `links` also have a build script. 
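Aside: `Layout::at(root).prepare()` in the deleted module above amounts to creating the root plus one directory per output class. A sketch with plain `std::fs`, directory names taken from the module docs above (the per-package `$pkg-<hash>` subdirectories are created later, per package):

```rust
use std::fs;
use std::io;
use std::path::PathBuf;

fn prepare(root: PathBuf) -> io::Result<()> {
    for sub in &["deps", "native", ".fingerprint", "examples", "build"] {
        fs::create_dir_all(root.join(sub))?;
    }
    Ok(())
}

fn main() -> io::Result<()> {
    prepare(PathBuf::from("target/debug"))
}
```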
-pub fn validate(deps: &PackageSet) -> CargoResult<()> {
- let mut map = HashMap::new();
-
- for dep in deps.iter() {
- let lib = match dep.manifest().links() {
- Some(lib) => lib,
- None => continue,
- };
- match map.get(&lib) {
- Some(previous) => {
- return Err(human(format!("native library `{}` is being linked \
- to by more than one package, and \
- can only be linked to by one \
- package\n\n {}\n {}",
- lib, previous, dep.package_id())))
- }
- None => {}
- }
- if !dep.manifest().targets().iter().any(|t| t.is_custom_build()) {
- return Err(human(format!("package `{}` specifies that it links to \
- `{}` but does not have a custom build \
- script", dep.package_id(), lib)))
- }
- map.insert(lib, dep.package_id());
- }
-
- Ok(())
-}
diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs
deleted file mode 100644
index ab0887864c9..00000000000
--- a/src/cargo/ops/cargo_rustc/mod.rs
+++ /dev/null
@@ -1,783 +0,0 @@
-use std::collections::{HashSet, HashMap};
-use std::env;
-use std::ffi::{OsStr, OsString};
-use std::fs;
-use std::io::prelude::*;
-use std::path::{self, PathBuf};
-use std::sync::Arc;
-
-use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve};
-use core::{Profile, Profiles};
-use util::{self, CargoResult, human, caused_human};
-use util::{Config, internal, ChainError, Fresh, profile, join_paths};
-
-use self::job::{Job, Work};
-use self::job_queue::{JobQueue, Stage};
-
-pub use self::compilation::Compilation;
-pub use self::context::Context;
-pub use self::context::Platform;
-pub use self::engine::{CommandPrototype, CommandType, ExecEngine, ProcessEngine};
-pub use self::layout::{Layout, LayoutProxy};
-pub use self::custom_build::{BuildOutput, BuildMap};
-
-mod context;
-mod compilation;
-mod custom_build;
-mod engine;
-mod fingerprint;
-mod job;
-mod job_queue;
-mod layout;
-mod links;
-
-#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy)]
-pub enum Kind { Host, Target }
-
-#[derive(Default, Clone)]
-pub struct BuildConfig {
- pub host: TargetConfig,
- pub target: TargetConfig,
- pub jobs: u32,
- pub requested_target: Option<String>,
- pub exec_engine: Option<Arc<Box<ExecEngine>>>,
- pub release: bool,
- pub doc_all: bool,
-}
-
-#[derive(Clone, Default)]
-pub struct TargetConfig {
- pub ar: Option<String>,
- pub linker: Option<String>,
- pub overrides: HashMap<String, BuildOutput>,
-}
-
-// Returns a mapping of the root package plus its immediate dependencies to
-// where the compiled libraries are all located.
-pub fn compile_targets<'a, 'cfg: 'a>(targets: &[(&'a Target, &'a Profile)], - pkg: &'a Package, - deps: &'a PackageSet, - resolve: &'a Resolve, - sources: &'a SourceMap<'cfg>, - config: &'cfg Config, - build_config: BuildConfig, - profiles: &'a Profiles) - -> CargoResult> { - if targets.is_empty() { - return Ok(Compilation::new(pkg, config)) - } - - debug!("compile_targets: {}", pkg); - - try!(links::validate(deps)); - - let dest = if build_config.release {"release"} else {"debug"}; - let root = if resolve.root() == pkg.package_id() { - pkg - } else { - deps.iter().find(|p| p.package_id() == resolve.root()).unwrap() - }; - let host_layout = Layout::new(config, root, None, &dest); - let target_layout = build_config.requested_target.as_ref().map(|target| { - layout::Layout::new(config, root, Some(&target), &dest) - }); - - let mut cx = try!(Context::new(resolve, sources, deps, config, - host_layout, target_layout, pkg, - build_config, profiles)); - - let mut queue = JobQueue::new(cx.resolve, deps, cx.jobs()); - - // Prep the context's build requirements and see the job graph for all - // packages initially. - { - let _p = profile::start("preparing build directories"); - try!(cx.prepare(pkg, targets)); - prepare_init(&mut cx, pkg, &mut queue, &mut HashSet::new()); - custom_build::build_map(&mut cx, pkg, targets); - } - - // Build up a list of pending jobs, each of which represent compiling a - // particular package. No actual work is executed as part of this, that's - // all done next as part of the `execute` function which will run - // everything in order with proper parallelism. - try!(compile(targets, pkg, &mut cx, &mut queue)); - - // Now that we've figured out everything that we're going to do, do it! - try!(queue.execute(cx.config)); - - let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg) - .display().to_string(); - cx.compilation.extra_env.insert("OUT_DIR".to_string(), out_dir); - - for &(target, profile) in targets { - let kind = Kind::from(target); - for filename in try!(cx.target_filenames(pkg, target, profile, - kind)).iter() { - let dst = cx.out_dir(pkg, kind, target).join(filename); - if profile.test { - cx.compilation.tests.push((target.name().to_string(), dst)); - } else if target.is_bin() || target.is_example() { - cx.compilation.binaries.push(dst); - } else if target.is_lib() { - let pkgid = pkg.package_id().clone(); - cx.compilation.libraries.entry(pkgid).or_insert(Vec::new()) - .push((target.clone(), dst)); - } - if !target.is_lib() { continue } - - // Include immediate lib deps as well - for dep in cx.dep_targets(pkg, target, kind, profile) { - let (pkg, target, profile) = dep; - let pkgid = pkg.package_id(); - if !target.is_lib() { continue } - if profile.doc { continue } - if cx.compilation.libraries.contains_key(&pkgid) { continue } - - let kind = kind.for_target(target); - let v = try!(cx.target_filenames(pkg, target, profile, kind)); - let v = v.into_iter().map(|f| { - (target.clone(), cx.out_dir(pkg, kind, target).join(f)) - }).collect::>(); - cx.compilation.libraries.insert(pkgid.clone(), v); - } - } - } - - if let Some(feats) = cx.resolve.features(pkg.package_id()) { - cx.compilation.features.extend(feats.iter().cloned()); - } - - for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() { - let any_dylib = output.library_links.iter().any(|l| { - !l.starts_with("static=") && !l.starts_with("framework=") - }); - if !any_dylib { continue } - for dir in output.library_paths.iter() { - cx.compilation.native_dirs.insert(pkg.clone(), 
dir.clone()); - } - } - Ok(cx.compilation) -} - -fn compile<'a, 'cfg>(targets: &[(&'a Target, &'a Profile)], - pkg: &'a Package, - cx: &mut Context<'a, 'cfg>, - jobs: &mut JobQueue<'a>) -> CargoResult<()> { - debug!("compile_pkg; pkg={}", pkg); - - // For each target/profile run the compiler or rustdoc accordingly. After - // having done so we enqueue the job in the right portion of the dependency - // graph and then move on to the next. - // - // This loop also takes care of enqueueing the work needed to actually run - // the custom build commands as well. - for &(target, profile) in targets { - if !cx.compiled.insert((pkg.package_id(), target, profile)) { - continue - } - - let profiling_marker = profile::start(format!("preparing: {}/{}", - pkg, target.name())); - let work = if profile.doc { - let rustdoc = try!(rustdoc(pkg, target, profile, cx)); - vec![(rustdoc, Kind::Target)] - } else { - let req = cx.get_requirement(pkg, target); - try!(rustc(pkg, target, profile, cx, req)) - }; - - let kinds = work.iter().map(|&(_, kind)| kind).collect::>(); - - for (work, kind) in work { - let (freshness, dirty, fresh) = - try!(fingerprint::prepare_target(cx, pkg, target, profile, kind)); - - let dirty = Work::new(move |desc_tx| { - try!(work.call(desc_tx.clone())); - dirty.call(desc_tx) - }); - - // Figure out what stage this work will go into - let dst = match (target.is_lib(), - profile.test, - target.is_custom_build()) { - (_, _, true) => jobs.queue(pkg, Stage::BuildCustomBuild), - (true, true, _) => jobs.queue(pkg, Stage::LibraryTests), - (false, true, _) => jobs.queue(pkg, Stage::BinaryTests), - (true, false, _) => jobs.queue(pkg, Stage::Libraries), - (false, false, _) if !target.is_bin() => { - jobs.queue(pkg, Stage::BinaryTests) - } - (false, false, _) => jobs.queue(pkg, Stage::Binaries), - }; - dst.push((Job::new(dirty, fresh), freshness)); - - } - drop(profiling_marker); - - // Be sure to compile all dependencies of this target as well. - for kind in kinds { - for (pkg, target, p) in cx.dep_targets(pkg, target, kind, profile) { - try!(compile(&[(target, p)], pkg, cx, jobs)); - } - } - - // If this is a custom build command, we need to not only build the - // script but we also need to run it. Note that this is a little nuanced - // because we may need to run the build script multiple times. If the - // package is needed in both a host and target context, we need to run - // it once per context. 
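Aside: the `Platform` requirement folded up just below forms a small lattice. Its `combine` semantics are reconstructed here from how the fold and the match on `requirement` use it; treat this as an assumption about the deleted code, not the original source:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Platform {
    Target,
    Plugin,
    PluginAndTarget,
}

impl Platform {
    // A package required by both a plugin (host) dependent and an
    // ordinary (target) dependent must be built for both contexts.
    fn combine(self, other: Platform) -> Platform {
        use Platform::*;
        match (self, other) {
            (Target, Target) => Target,
            (Plugin, Plugin) => Plugin,
            _ => PluginAndTarget,
        }
    }
}

fn main() {
    use Platform::*;
    assert_eq!(Target.combine(Plugin), PluginAndTarget);
    assert_eq!(PluginAndTarget.combine(Target), PluginAndTarget);
}
```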
- if !target.is_custom_build() { continue }
- let mut reqs = Vec::new();
- let requirement = pkg.targets().iter().filter(|t| !t.is_custom_build())
- .fold(None::<Platform>, |req, t| {
- let r2 = cx.get_requirement(pkg, t);
- req.map(|r| r.combine(r2)).or(Some(r2))
- }).unwrap_or(Platform::Target);
- match requirement {
- Platform::Target => reqs.push(Platform::Target),
- Platform::Plugin => reqs.push(Platform::Plugin),
- Platform::PluginAndTarget => {
- if cx.requested_target().is_some() {
- reqs.push(Platform::Plugin);
- reqs.push(Platform::Target);
- } else {
- reqs.push(Platform::PluginAndTarget);
- }
- }
- }
- let before = jobs.queue(pkg, Stage::RunCustomBuild).len();
- for &req in reqs.iter() {
- let kind = match req {
- Platform::Plugin => Kind::Host,
- _ => Kind::Target,
- };
- let key = (pkg.package_id().clone(), kind);
- if pkg.manifest().links().is_some() &&
- cx.build_state.outputs.lock().unwrap().contains_key(&key) {
- continue
- }
- let (dirty, fresh, freshness) =
- try!(custom_build::prepare(pkg, target, req, cx));
- let run_custom = jobs.queue(pkg, Stage::RunCustomBuild);
- run_custom.push((Job::new(dirty, fresh), freshness));
- }
-
- // If we didn't actually run the custom build command, then there's no
- // need to compile it.
- if jobs.queue(pkg, Stage::RunCustomBuild).len() == before {
- jobs.queue(pkg, Stage::BuildCustomBuild).pop();
- }
- }
-
- Ok(())
-}
-
-fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- pkg: &'a Package,
- jobs: &mut JobQueue<'a>,
- visited: &mut HashSet<&'a PackageId>) {
- if !visited.insert(pkg.package_id()) { return }
-
- // Set up all dependencies
- for dep in cx.resolve.deps(pkg.package_id()).into_iter().flat_map(|a| a) {
- let dep = cx.get_package(dep);
- prepare_init(cx, dep, jobs, visited);
- }
-
- // Initialize blank queues for each stage
- jobs.queue(pkg, Stage::BuildCustomBuild);
- jobs.queue(pkg, Stage::RunCustomBuild);
- jobs.queue(pkg, Stage::Libraries);
- jobs.queue(pkg, Stage::Binaries);
- jobs.queue(pkg, Stage::LibraryTests);
- jobs.queue(pkg, Stage::BinaryTests);
- jobs.queue(pkg, Stage::End);
-
- // Prepare the fingerprint directory as the first step of building a package
- let (target1, target2) = fingerprint::prepare_init(cx, pkg, Kind::Target);
- let init = jobs.queue(pkg, Stage::Start);
- if cx.requested_target().is_some() {
- let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg,
- Kind::Host);
- init.push((Job::new(plugin1, plugin2), Fresh));
- }
- init.push((Job::new(target1, target2), Fresh));
-}
-
-fn rustc(package: &Package, target: &Target, profile: &Profile,
- cx: &mut Context, req: Platform)
- -> CargoResult<Vec<(Work, Kind)>> {
- let crate_types = target.rustc_crate_types();
- let rustcs = try!(prepare_rustc(package, target, profile, crate_types,
- cx, req));
-
- let plugin_deps = load_build_deps(cx, package, target, profile, Kind::Host);
-
- return rustcs.into_iter().map(|(mut rustc, kind)| {
- let name = package.name().to_string();
- let is_path_source = package.package_id().source_id().is_path();
- let allow_warnings = package.package_id() == cx.resolve.root() ||
- is_path_source;
- if !allow_warnings {
- if cx.config.rustc_info().cap_lints {
- rustc.arg("--cap-lints").arg("allow");
- } else {
- rustc.arg("-Awarnings");
- }
- }
- let has_custom_args = profile.rustc_args.is_some();
- let exec_engine = cx.exec_engine.clone();
-
- let filenames = try!(cx.target_filenames(package, target, profile,
- kind));
- let root = cx.out_dir(package, kind, target);
-
- // Prepare the native lib state (extra -L and -l flags)
- let build_state = 
cx.build_state.clone();
- let current_id = package.package_id().clone();
- let plugin_deps = plugin_deps.clone();
- let mut native_lib_deps = load_build_deps(cx, package, target, profile,
- kind);
- if package.has_custom_build() && !target.is_custom_build() {
- native_lib_deps.insert(0, current_id.clone());
- }
-
- // If we are a binary and the package also contains a library, then we
- // don't pass the `-l` flags.
- let pass_l_flag = target.is_lib() || !package.targets().iter().any(|t| {
- t.is_lib()
- });
- let do_rename = target.allows_underscores() && !profile.test;
- let real_name = target.name().to_string();
- let crate_name = target.crate_name();
-
- let rustc_dep_info_loc = if do_rename {
- root.join(&crate_name)
- } else {
- root.join(&cx.file_stem(package, target, profile))
- }.with_extension("d");
- let dep_info_loc = fingerprint::dep_info_loc(cx, package, target,
- profile, kind);
- let cwd = cx.config.cwd().to_path_buf();
-
- Ok((Work::new(move |desc_tx| {
- debug!("about to run: {}", rustc);
-
- // Only at runtime have we discovered what the extra -L and -l
- // arguments are for native libraries, so we process those here. We
- // also need to be sure to add any -L paths for our plugins to the
- // dynamic library load path as a plugin's dynamic library may be
- // located somewhere in there.
- let build_state = build_state.outputs.lock().unwrap();
- add_native_deps(&mut rustc, &build_state, native_lib_deps,
- kind, pass_l_flag, &current_id);
- try!(add_plugin_deps(&mut rustc, &build_state, plugin_deps));
- drop(build_state);
-
- // FIXME(rust-lang/rust#18913): we probably shouldn't have to do
- // this manually
- for filename in filenames.iter() {
- let dst = root.join(filename);
- if fs::metadata(&dst).is_ok() {
- try!(fs::remove_file(&dst));
- }
- }
-
- desc_tx.send(rustc.to_string()).ok();
- try!(exec_engine.exec(rustc).chain_error(|| {
- human(format!("Could not compile `{}`.", name))
- }));
-
- if do_rename && real_name != crate_name {
- let dst = root.join(&filenames[0]);
- let src = dst.with_file_name(dst.file_name().unwrap()
- .to_str().unwrap()
- .replace(&real_name, &crate_name));
- if !has_custom_args || fs::metadata(&src).is_ok() {
- try!(fs::rename(&src, &dst).chain_error(|| {
- internal(format!("could not rename crate {:?}", src))
- }));
- }
- }
-
- if !has_custom_args || fs::metadata(&rustc_dep_info_loc).is_ok() {
- try!(fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| {
- internal(format!("could not rename dep info: {:?}",
- rustc_dep_info_loc))
- }));
- try!(fingerprint::append_current_dir(&dep_info_loc, &cwd));
- }
-
- Ok(())
-
- }), kind))
- }).collect();
-
- // Add all relevant -L and -l flags from dependencies (now calculated and
- // present in `state`) to the command provided
- fn add_native_deps(rustc: &mut CommandPrototype,
- build_state: &BuildMap,
- native_lib_deps: Vec<PackageId>,
- kind: Kind,
- pass_l_flag: bool,
- current_id: &PackageId) {
- for id in native_lib_deps.into_iter() {
- debug!("looking up {} {:?}", id, kind);
- let output = &build_state[&(id.clone(), kind)];
- for path in output.library_paths.iter() {
- rustc.arg("-L").arg(path);
- }
- if id == *current_id {
- for cfg in &output.cfgs {
- rustc.arg("--cfg").arg(cfg);
- }
- if pass_l_flag {
- for name in output.library_links.iter() {
- rustc.arg("-l").arg(name);
- }
- }
- }
- }
- }
-}
-
-fn load_build_deps(cx: &Context, pkg: &Package, target: &Target,
- profile: &Profile, kind: Kind) -> Vec<PackageId> {
- let pkg = cx.get_package(pkg.package_id());
- cx.build_scripts.get(&(pkg.package_id(), 
target, profile, kind)).map(|deps| { - deps.iter().map(|&d| d.clone()).collect::>() - }).unwrap_or(Vec::new()) -} - -// For all plugin dependencies, add their -L paths (now calculated and -// present in `state`) to the dynamic library load path for the command to -// execute. -fn add_plugin_deps(rustc: &mut CommandPrototype, - build_state: &BuildMap, - plugin_deps: Vec) - -> CargoResult<()> { - let var = util::dylib_path_envvar(); - let search_path = rustc.get_env(var).unwrap_or(OsString::new()); - let mut search_path = env::split_paths(&search_path).collect::>(); - for id in plugin_deps.into_iter() { - debug!("adding libs for plugin dep: {}", id); - let output = &build_state[&(id, Kind::Host)]; - for path in output.library_paths.iter() { - search_path.push(path.clone()); - } - } - let search_path = try!(join_paths(&search_path, var)); - rustc.env(var, &search_path); - Ok(()) -} - -fn prepare_rustc(package: &Package, target: &Target, profile: &Profile, - crate_types: Vec<&str>, - cx: &Context, req: Platform) - -> CargoResult> { - let mut base = try!(process(CommandType::Rustc, package, target, cx)); - build_base_args(cx, &mut base, package, target, profile, &crate_types); - - let mut targ_cmd = base.clone(); - let mut host_cmd = base; - build_plugin_args(&mut targ_cmd, cx, package, target, Kind::Target); - build_plugin_args(&mut host_cmd, cx, package, target, Kind::Host); - try!(build_deps_args(&mut targ_cmd, target, profile, package, cx, Kind::Target)); - try!(build_deps_args(&mut host_cmd, target, profile, package, cx, Kind::Host)); - - Ok(match req { - Platform::Target => vec![(targ_cmd, Kind::Target)], - Platform::Plugin => vec![(host_cmd, Kind::Host)], - Platform::PluginAndTarget if cx.requested_target().is_none() => { - vec![(targ_cmd, Kind::Target)] - } - Platform::PluginAndTarget => vec![(targ_cmd, Kind::Target), - (host_cmd, Kind::Host)], - }) -} - - -fn rustdoc(package: &Package, target: &Target, profile: &Profile, - cx: &mut Context) -> CargoResult { - let kind = Kind::Target; - let mut rustdoc = try!(process(CommandType::Rustdoc, package, target, cx)); - rustdoc.arg(&root_path(cx, package, target)) - .cwd(cx.config.cwd()) - .arg("--crate-name").arg(&target.crate_name()); - - let mut doc_dir = cx.config.target_dir(cx.get_package(cx.resolve.root())); - if let Some(target) = cx.requested_target() { - rustdoc.arg("--target").arg(target); - doc_dir.push(target); - } - - doc_dir.push("doc"); - rustdoc.arg("-o").arg(doc_dir); - - match cx.resolve.features(package.package_id()) { - Some(features) => { - for feat in features { - rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); - } - } - None => {} - } - - try!(build_deps_args(&mut rustdoc, target, profile, package, cx, kind)); - - if package.has_custom_build() { - rustdoc.env("OUT_DIR", &cx.layout(package, kind).build_out(package)); - } - - trace!("commands={}", rustdoc); - - let primary = package.package_id() == cx.resolve.root(); - let name = package.name().to_string(); - let desc = rustdoc.to_string(); - let exec_engine = cx.exec_engine.clone(); - - Ok(Work::new(move |desc_tx| { - desc_tx.send(desc).unwrap(); - if primary { - try!(exec_engine.exec(rustdoc).chain_error(|| { - human(format!("Could not document `{}`.", name)) - })) - } else { - try!(exec_engine.exec_with_output(rustdoc).and(Ok(())).map_err(|err| { - match err.exit { - Some(..) 
=> {
- caused_human(format!("Could not document `{}`.",
- name), err)
- }
- None => {
- caused_human("Failed to run rustdoc", err)
- }
- }
- }))
- }
- Ok(())
- }))
-}
-
-// The path that we pass to rustc is actually fairly important because it will
-// show up in error messages and the like. For this reason we take a few moments
-// to ensure that something shows up pretty reasonably.
-//
-// The heuristic here is fairly simple, but the key idea is that the path is
-// always "relative" to the current directory in order to be found easily. The
-// path is only actually relative if the current directory is an ancestor of it.
-// This means that non-path dependencies (git/registry) will likely be shown as
-// absolute paths instead of relative paths.
-fn root_path(cx: &Context, pkg: &Package, target: &Target) -> PathBuf {
- let absolute = pkg.root().join(target.src_path());
- let cwd = cx.config.cwd();
- if absolute.starts_with(cwd) {
- util::without_prefix(&absolute, cwd).map(|s| {
- s.to_path_buf()
- }).unwrap_or(absolute)
- } else {
- absolute
- }
-}
-
-fn build_base_args(cx: &Context,
- cmd: &mut CommandPrototype,
- pkg: &Package,
- target: &Target,
- profile: &Profile,
- crate_types: &[&str]) {
- let Profile {
- opt_level, lto, codegen_units, ref rustc_args, debuginfo, debug_assertions,
- rpath, test, doc: _doc,
- } = *profile;
-
- // Move to cwd so the root_path() passed below is actually correct
- cmd.cwd(cx.config.cwd());
-
- // TODO: Handle errors in converting paths into args
- cmd.arg(&root_path(cx, pkg, target));
-
- cmd.arg("--crate-name").arg(&target.crate_name());
-
- for crate_type in crate_types.iter() {
- cmd.arg("--crate-type").arg(crate_type);
- }
-
- let prefer_dynamic = (target.for_host() && !target.is_custom_build()) ||
- (crate_types.contains(&"dylib") &&
- pkg.package_id() != cx.resolve.root());
- if prefer_dynamic {
- cmd.arg("-C").arg("prefer-dynamic");
- }
-
- if opt_level != 0 {
- cmd.arg("-C").arg(&format!("opt-level={}", opt_level));
- }
-
- // Disable LTO for host builds as it and prefer_dynamic are mutually
- // exclusive.
- if target.can_lto() && lto && !target.for_host() {
- cmd.args(&["-C", "lto"]);
- } else {
- // There are some restrictions with LTO and codegen-units, so we
- // only add codegen units when LTO is not used.
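Aside: the flag mapping performed by `build_base_args` can be restated as a pure function. This is illustrative only, with simplified parameters rather than Cargo's real `Profile`:

```rust
fn profile_flags(opt_level: u32, lto: bool, codegen_units: Option<u32>, debuginfo: bool) -> Vec<String> {
    let mut args = Vec::new();
    if opt_level != 0 {
        args.push(format!("-Copt-level={}", opt_level));
    }
    if lto {
        // LTO is mutually exclusive with prefer-dynamic and constrains
        // codegen-units, hence the either/or in the code above.
        args.push("-Clto".to_string());
    } else if let Some(n) = codegen_units {
        args.push(format!("-Ccodegen-units={}", n));
    }
    if debuginfo {
        args.push("-g".to_string());
    }
    args
}

fn main() {
    assert_eq!(profile_flags(3, true, Some(16), false), ["-Copt-level=3", "-Clto"]);
}
```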
- match codegen_units { - Some(n) => { cmd.arg("-C").arg(&format!("codegen-units={}", n)); } - None => {}, - } - } - - if debuginfo { - cmd.arg("-g"); - } - - if let Some(ref args) = *rustc_args { - cmd.args(args); - } - - if debug_assertions && opt_level > 0 { - cmd.args(&["-C", "debug-assertions=on"]); - } else if !debug_assertions && opt_level == 0 { - cmd.args(&["-C", "debug-assertions=off"]); - } - - if test && target.harness() { - cmd.arg("--test"); - } - - match cx.resolve.features(pkg.package_id()) { - Some(features) => { - for feat in features.iter() { - cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); - } - } - None => {} - } - - match cx.target_metadata(pkg, target, profile) { - Some(m) => { - cmd.arg("-C").arg(&format!("metadata={}", m.metadata)); - cmd.arg("-C").arg(&format!("extra-filename={}", m.extra_filename)); - } - None => {} - } - - if rpath { - cmd.arg("-C").arg("rpath"); - } -} - - -fn build_plugin_args(cmd: &mut CommandPrototype, cx: &Context, pkg: &Package, - target: &Target, kind: Kind) { - fn opt(cmd: &mut CommandPrototype, key: &str, prefix: &str, - val: Option<&OsStr>) { - if let Some(val) = val { - let mut joined = OsString::from(prefix); - joined.push(val); - cmd.arg(key).arg(joined); - } - } - - cmd.arg("--out-dir").arg(&cx.out_dir(pkg, kind, target)); - cmd.arg("--emit=dep-info,link"); - - if kind == Kind::Target { - opt(cmd, "--target", "", cx.requested_target().map(|s| s.as_ref())); - } - - opt(cmd, "-C", "ar=", cx.ar(kind).map(|s| s.as_ref())); - opt(cmd, "-C", "linker=", cx.linker(kind).map(|s| s.as_ref())); -} - -fn build_deps_args(cmd: &mut CommandPrototype, - target: &Target, - profile: &Profile, - package: &Package, - cx: &Context, - kind: Kind) - -> CargoResult<()> { - let layout = cx.layout(package, kind); - cmd.arg("-L").arg(&{ - let mut root = OsString::from("dependency="); - root.push(layout.root()); - root - }); - cmd.arg("-L").arg(&{ - let mut deps = OsString::from("dependency="); - deps.push(layout.deps()); - deps - }); - - if package.has_custom_build() { - cmd.env("OUT_DIR", &layout.build_out(package)); - } - - for (pkg, target, p) in cx.dep_targets(package, target, kind, profile) { - if target.linkable() { - try!(link_to(cmd, pkg, target, p, cx, kind)); - } - } - - return Ok(()); - - fn link_to(cmd: &mut CommandPrototype, pkg: &Package, target: &Target, - profile: &Profile, cx: &Context, kind: Kind) -> CargoResult<()> { - let kind = kind.for_target(target); - let layout = cx.layout(pkg, kind); - - for filename in try!(cx.target_filenames(pkg, target, profile, kind)).iter() { - if filename.ends_with(".a") { continue } - let mut v = OsString::new(); - v.push(&target.crate_name()); - v.push("="); - v.push(layout.root()); - v.push(&path::MAIN_SEPARATOR.to_string()); - v.push(&filename); - cmd.arg("--extern").arg(&v); - } - Ok(()) - } -} - -pub fn process(cmd: CommandType, pkg: &Package, _target: &Target, - cx: &Context) -> CargoResult { - // When invoking a tool, we need the *host* deps directory in the dynamic - // library search path for plugins and such which have dynamic dependencies. - let layout = cx.layout(pkg, Kind::Host); - let mut search_path = util::dylib_path(); - search_path.push(layout.deps().to_path_buf()); - - // We want to use the same environment and such as normal processes, but we - // want to override the dylib search path with the one we just calculated. 
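Aside: the same dylib-search-path surgery can be sketched with only the standard library. `/tmp/example-deps` is a made-up path; the variable names are the usual per-OS dynamic-linker variables:

```rust
use std::env;

fn main() {
    let var = if cfg!(windows) {
        "PATH"
    } else if cfg!(target_os = "macos") {
        "DYLD_LIBRARY_PATH"
    } else {
        "LD_LIBRARY_PATH"
    };
    let existing = env::var_os(var).unwrap_or_default();
    let mut paths: Vec<_> = env::split_paths(&existing).collect();
    paths.push("/tmp/example-deps".into());
    let joined = env::join_paths(paths).expect("path contained a separator");
    // The spawned tool would receive this via Command::env(var, &joined).
    println!("{}={:?}", var, joined);
}
```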
- let search_path = try!(join_paths(&search_path, util::dylib_path_envvar())); - let mut cmd = try!(cx.compilation.process(cmd, pkg)); - cmd.env(util::dylib_path_envvar(), &search_path); - Ok(cmd) -} - -fn envify(s: &str) -> String { - s.chars() - .flat_map(|c| c.to_uppercase()) - .map(|c| if c == '-' {'_'} else {c}) - .collect() -} - -impl Kind { - fn from(target: &Target) -> Kind { - if target.for_host() {Kind::Host} else {Kind::Target} - } - - fn for_target(&self, target: &Target) -> Kind { - // Once we start compiling for the `Host` kind we continue doing so, but - // if we are a `Target` kind and then we start compiling for a target - // that needs to be on the host we lift ourselves up to `Host` - match *self { - Kind::Host => Kind::Host, - Kind::Target if target.for_host() => Kind::Host, - Kind::Target => Kind::Target, - } - } -} diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs index 9fe2f02a1cc..74433005a30 100644 --- a/src/cargo/ops/cargo_test.rs +++ b/src/cargo/ops/cargo_test.rs @@ -1,139 +1,210 @@ -use std::ffi::{OsString, OsStr}; -use std::path::Path; +use std::ffi::OsString; -use core::Source; -use sources::PathSource; -use ops::{self, ExecEngine, ProcessEngine, Compilation}; -use util::{self, CargoResult, ProcessError}; +use crate::core::compiler::{Compilation, Doctest}; +use crate::core::shell::Verbosity; +use crate::core::Workspace; +use crate::ops; +use crate::util::errors::CargoResult; +use crate::util::{CargoTestError, ProcessError, Test}; pub struct TestOptions<'a> { pub compile_opts: ops::CompileOptions<'a>, pub no_run: bool, + pub no_fail_fast: bool, } -#[allow(deprecated)] // connect => join in 1.3 -pub fn run_tests(manifest_path: &Path, - options: &TestOptions, - test_args: &[String]) -> CargoResult> { - let config = options.compile_opts.config; - let compile = match try!(build_and_run(manifest_path, options, test_args)) { - Ok(compile) => compile, - Err(e) => return Ok(Some(e)), - }; - - // If a specific test was requested or we're not running any tests at all, - // don't run any doc tests. - if let ops::CompileFilter::Only { .. } = options.compile_opts.filter { - return Ok(None) +pub fn run_tests( + ws: &Workspace<'_>, + options: &TestOptions<'_>, + test_args: &[&str], +) -> CargoResult> { + let compilation = compile_tests(ws, options)?; + + if options.no_run { + return Ok(None); + } + let (test, mut errors) = run_unit_tests(options, test_args, &compilation)?; + + // If we have an error and want to fail fast, then return. 
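Aside: the `--no-fail-fast` control flow introduced in this file reduces to "run each suite, collect errors, and only short-circuit when fail-fast is in effect". A minimal sketch:

```rust
fn run_all(suites: &[fn() -> Result<(), String>], no_fail_fast: bool) -> Vec<String> {
    let mut errors = Vec::new();
    for suite in suites {
        if let Err(e) = suite() {
            errors.push(e);
            if !no_fail_fast {
                break;
            }
        }
    }
    errors
}

fn main() {
    let suites: Vec<fn() -> Result<(), String>> = vec![
        || Err("unit tests failed".into()),
        || Err("doc tests failed".into()),
    ];
    // Fail-fast stops at the first failure; --no-fail-fast collects both.
    assert_eq!(run_all(&suites, false).len(), 1);
    assert_eq!(run_all(&suites, true).len(), 2);
}
```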
+ if !errors.is_empty() && !options.no_fail_fast {
+ return Ok(Some(CargoTestError::new(test, errors)));
+ }
+
+ let (doctest, docerrors) = run_doc_tests(options, test_args, &compilation)?;
+ let test = if docerrors.is_empty() { test } else { doctest };
+ errors.extend(docerrors);
+ if errors.is_empty() {
+ Ok(None)
+ } else {
+ Ok(Some(CargoTestError::new(test, errors)))
 }
+}
+
+pub fn run_benches(
+ ws: &Workspace<'_>,
+ options: &TestOptions<'_>,
+ args: &[&str],
+) -> CargoResult<Option<CargoTestError>> {
+ let compilation = compile_tests(ws, options)?;
+
 if options.no_run {
- return Ok(None)
+ return Ok(None);
+ }
+
+ let mut args = args.to_vec();
+ args.push("--bench");
+
+ let (test, errors) = run_unit_tests(options, &args, &compilation)?;
+
+ match errors.len() {
+ 0 => Ok(None),
+ _ => Ok(Some(CargoTestError::new(test, errors))),
 }
+}
 
- let libs = compile.package.targets().iter()
- .filter(|t| t.doctested())
- .map(|t| (t.src_path(), t.name(), t.crate_name()));
+fn compile_tests<'a>(
+ ws: &Workspace<'a>,
+ options: &TestOptions<'a>,
+) -> CargoResult<Compilation<'a>> {
+ let mut compilation = ops::compile(ws, &options.compile_opts)?;
+ compilation
+ .tests
+ .sort_by(|a, b| (a.0.package_id(), &a.1, &a.2).cmp(&(b.0.package_id(), &b.1, &b.2)));
+ Ok(compilation)
+}
 
- for (lib, name, crate_name) in libs {
- try!(config.shell().status("Doc-tests", name));
- let mut p = try!(compile.rustdoc_process(&compile.package));
- p.arg("--test").arg(lib)
- .arg("--crate-name").arg(&crate_name)
- .cwd(compile.package.root());
+/// Runs the unit and integration tests of a package.
+fn run_unit_tests(
+ options: &TestOptions<'_>,
+ test_args: &[&str],
+ compilation: &Compilation<'_>,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
+ let config = options.compile_opts.config;
+ let cwd = options.compile_opts.config.cwd();
+
+ let mut errors = Vec::new();
 
- for &rust_dep in &[&compile.deps_output, &compile.root_output] {
+ for &(ref pkg, ref target, ref exe) in &compilation.tests {
+ let kind = target.kind();
+ let test = target.name().to_string();
+ let exe_display = exe.strip_prefix(cwd).unwrap_or(exe).display();
+ let mut cmd = compilation.target_process(exe, pkg)?;
+ cmd.args(test_args);
+ if target.harness() && config.shell().verbosity() == Verbosity::Quiet {
+ cmd.arg("--quiet");
+ }
+ config
+ .shell()
+ .concise(|shell| shell.status("Running", &exe_display))?;
+ config
+ .shell()
+ .verbose(|shell| shell.status("Running", &cmd))?;
+
+ let result = cmd.exec();
+
+ match result {
+ Err(e) => {
+ let e = e.downcast::<ProcessError>()?;
+ errors.push((kind.clone(), test.clone(), pkg.name().to_string(), e));
+ if !options.no_fail_fast {
+ break;
+ }
+ }
+ Ok(()) => {}
+ }
+ }
+
+ if errors.len() == 1 {
+ let (kind, name, pkg_name, e) = errors.pop().unwrap();
+ Ok((
+ Test::UnitTest {
+ kind,
+ name,
+ pkg_name,
+ },
+ vec![e],
+ ))
+ } else {
+ Ok((
+ Test::Multiple,
+ errors.into_iter().map(|(_, _, _, e)| e).collect(),
+ ))
+ }
+}
+
+fn run_doc_tests(
+ options: &TestOptions<'_>,
+ test_args: &[&str],
+ compilation: &Compilation<'_>,
+) -> CargoResult<(Test, Vec<ProcessError>)> {
+ let mut errors = Vec::new();
+ let config = options.compile_opts.config;
+
+ // We don't build/run doc tests if `target` does not equal `host`.
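Aside: each iteration of the doc-test loop below assembles a `rustdoc --test` invocation carrying `-L`, `--extern`, and `--test-args` flags. Roughly, with made-up paths standing in for what `Compilation` supplies:

```rust
use std::process::Command;

fn main() {
    let mut p = Command::new("rustdoc");
    p.arg("--test")
        .arg("src/lib.rs")
        .args(&["--crate-name", "mycrate"])
        .args(&["-L", "dependency=target/debug/deps"])
        .args(&["--extern", "serde=target/debug/deps/libserde.rlib"])
        .args(&["--test-args", "--nocapture"]);
    // `p.status()` would run it; print instead to keep the sketch inert.
    println!("{:?}", p);
}
```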
+ if compilation.host != compilation.target { + return Ok((Test::Doc, errors)); + } + + for doctest_info in &compilation.to_doc_test { + let Doctest { + package, + target, + deps, + } = doctest_info; + config.shell().status("Doc-tests", target.name())?; + let mut p = compilation.rustdoc_process(package, target)?; + p.arg("--test") + .arg(target.src_path().path().unwrap()) + .arg("--crate-name") + .arg(&target.crate_name()); + + for &rust_dep in &[&compilation.deps_output] { let mut arg = OsString::from("dependency="); arg.push(rust_dep); p.arg("-L").arg(arg); } - for native_dep in compile.native_dirs.values() { + + for native_dep in compilation.native_dirs.iter() { p.arg("-L").arg(native_dep); } - if test_args.len() > 0 { - p.arg("--test-args").arg(&test_args.connect(" ")); + for &host_rust_dep in &[&compilation.host_deps_output] { + let mut arg = OsString::from("dependency="); + arg.push(host_rust_dep); + p.arg("-L").arg(arg); } - for feat in compile.features.iter() { - p.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + for arg in test_args { + p.arg("--test-args").arg(arg); } - for (_, libs) in compile.libraries.iter() { - for &(ref target, ref lib) in libs.iter() { - // Note that we can *only* doctest rlib outputs here. A - // staticlib output cannot be linked by the compiler (it just - // doesn't do that). A dylib output, however, can be linked by - // the compiler, but will always fail. Currently all dylibs are - // built as "static dylibs" where the standard library is - // statically linked into the dylib. The doc tests fail, - // however, for now as they try to link the standard library - // dynamically as well, causing problems. As a result we only - // pass `--extern` for rlib deps and skip out on all other - // artifacts. - if lib.extension() != Some(OsStr::new("rlib")) && - !target.for_host() { - continue - } - let mut arg = OsString::from(target.crate_name()); - arg.push("="); - arg.push(lib); - p.arg("--extern").arg(&arg); + if let Some(cfgs) = compilation.cfgs.get(&package.package_id()) { + for cfg in cfgs.iter() { + p.arg("--cfg").arg(cfg); } } - try!(config.shell().verbose(|shell| { - shell.status("Running", p.to_string()) - })); - match ExecEngine::exec(&mut ProcessEngine, p) { - Ok(()) => {} - Err(e) => return Ok(Some(e)), + for &(ref extern_crate_name, ref lib) in deps.iter() { + let mut arg = OsString::from(extern_crate_name); + arg.push("="); + arg.push(lib); + p.arg("--extern").arg(&arg); } - } - - Ok(None) -} -pub fn run_benches(manifest_path: &Path, - options: &TestOptions, - args: &[String]) -> CargoResult> { - let mut args = args.to_vec(); - args.push("--bench".to_string()); - - Ok(try!(build_and_run(manifest_path, options, &args)).err()) -} + if let Some(flags) = compilation.rustdocflags.get(&package.package_id()) { + p.args(flags); + } -fn build_and_run<'a>(manifest_path: &Path, - options: &TestOptions<'a>, - test_args: &[String]) - -> CargoResult, ProcessError>> { - let config = options.compile_opts.config; - let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(), - config)); - try!(source.update()); - - let mut compile = try!(ops::compile(manifest_path, &options.compile_opts)); - if options.no_run { return Ok(Ok(compile)) } - compile.tests.sort(); - - let cwd = config.cwd(); - for &(_, ref exe) in &compile.tests { - let to_display = match util::without_prefix(exe, &cwd) { - Some(path) => path, - None => &**exe, - }; - let mut cmd = try!(compile.target_process(exe, &compile.package)); - cmd.args(test_args); - 
try!(config.shell().concise(|shell| { - shell.status("Running", to_display.display().to_string()) - })); - try!(config.shell().verbose(|shell| { - shell.status("Running", cmd.to_string()) - })); - match ExecEngine::exec(&mut ProcessEngine, cmd) { - Ok(()) => {} - Err(e) => return Ok(Err(e)) + config + .shell() + .verbose(|shell| shell.status("Running", p.to_string()))?; + if let Err(e) = p.exec() { + let e = e.downcast::()?; + errors.push(e); + if !options.no_fail_fast { + return Ok((Test::Doc, errors)); + } } } - - Ok(Ok(compile)) + Ok((Test::Doc, errors)) } diff --git a/src/cargo/ops/cargo_uninstall.rs b/src/cargo/ops/cargo_uninstall.rs new file mode 100644 index 00000000000..ee7ba2197b4 --- /dev/null +++ b/src/cargo/ops/cargo_uninstall.rs @@ -0,0 +1,152 @@ +use failure::bail; +use std::collections::BTreeSet; +use std::env; + +use crate::core::PackageId; +use crate::core::{PackageIdSpec, SourceId}; +use crate::ops::common_for_install_and_uninstall::*; +use crate::util::errors::CargoResult; +use crate::util::paths; +use crate::util::Config; +use crate::util::Filesystem; + +pub fn uninstall( + root: Option<&str>, + specs: Vec<&str>, + bins: &[String], + config: &Config, +) -> CargoResult<()> { + if specs.len() > 1 && !bins.is_empty() { + bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant."); + } + + let root = resolve_root(root, config)?; + let scheduled_error = if specs.len() == 1 { + uninstall_one(&root, specs[0], bins, config)?; + false + } else if specs.is_empty() { + uninstall_cwd(&root, bins, config)?; + false + } else { + let mut succeeded = vec![]; + let mut failed = vec![]; + for spec in specs { + let root = root.clone(); + match uninstall_one(&root, spec, bins, config) { + Ok(()) => succeeded.push(spec), + Err(e) => { + crate::handle_error(&e, &mut config.shell()); + failed.push(spec) + } + } + } + + let mut summary = vec![]; + if !succeeded.is_empty() { + summary.push(format!( + "Successfully uninstalled {}!", + succeeded.join(", ") + )); + } + if !failed.is_empty() { + summary.push(format!( + "Failed to uninstall {} (see error(s) above).", + failed.join(", ") + )); + } + + if !succeeded.is_empty() || !failed.is_empty() { + config.shell().status("Summary", summary.join(" "))?; + } + + !failed.is_empty() + }; + + if scheduled_error { + bail!("some packages failed to uninstall"); + } + + Ok(()) +} + +pub fn uninstall_one( + root: &Filesystem, + spec: &str, + bins: &[String], + config: &Config, +) -> CargoResult<()> { + let tracker = InstallTracker::load(config, root)?; + let all_pkgs = tracker.all_installed_bins().map(|(pkg_id, _set)| *pkg_id); + let pkgid = PackageIdSpec::query_str(spec, all_pkgs)?; + uninstall_pkgid(root, tracker, pkgid, bins, config) +} + +fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> { + let tracker = InstallTracker::load(config, root)?; + let source_id = SourceId::for_path(config.cwd())?; + let src = path_source(source_id, config)?; + let pkg = select_pkg(src, None, None, config, true, &mut |path| { + path.read_packages() + })?; + let pkgid = pkg.package_id(); + uninstall_pkgid(root, tracker, pkgid, bins, config) +} + +fn uninstall_pkgid( + root: &Filesystem, + mut tracker: InstallTracker, + pkgid: PackageId, + bins: &[String], + config: &Config, +) -> CargoResult<()> { + let mut to_remove = Vec::new(); + let installed = match tracker.installed_bins(pkgid) { + Some(bins) => bins.clone(), + None => bail!("package `{}` is not installed", pkgid), + 
};
+
+ let dst = root.join("bin").into_path_unlocked();
+ for bin in &installed {
+ let bin = dst.join(bin);
+ if !bin.exists() {
+ bail!(
+ "corrupt metadata, `{}` does not exist when it should",
+ bin.display()
+ )
+ }
+ }
+
+ let bins = bins
+ .iter()
+ .map(|s| {
+ if s.ends_with(env::consts::EXE_SUFFIX) {
+ s.to_string()
+ } else {
+ format!("{}{}", s, env::consts::EXE_SUFFIX)
+ }
+ })
+ .collect::<BTreeSet<_>>();
+
+ for bin in bins.iter() {
+ if !installed.contains(bin) {
+ bail!("binary `{}` not installed as part of `{}`", bin, pkgid)
+ }
+ }
+
+ if bins.is_empty() {
+ to_remove.extend(installed.iter().map(|b| dst.join(b)));
+ tracker.remove(pkgid, &installed);
+ } else {
+ for bin in bins.iter() {
+ to_remove.push(dst.join(bin));
+ }
+ tracker.remove(pkgid, &bins);
+ }
+ tracker.save()?;
+ for bin in to_remove {
+ config.shell().status("Removing", bin.display())?;
+ paths::remove_file(bin)?;
+ }
+
+ Ok(())
+}
diff --git a/src/cargo/ops/common_for_install_and_uninstall.rs b/src/cargo/ops/common_for_install_and_uninstall.rs
new file mode 100644
index 00000000000..498caee3e62
--- /dev/null
+++ b/src/cargo/ops/common_for_install_and_uninstall.rs
@@ -0,0 +1,773 @@
+use std::collections::{btree_map, BTreeMap, BTreeSet};
+use std::env;
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::path::{Path, PathBuf};
+
+use failure::{bail, format_err};
+use semver::VersionReq;
+use serde::{Deserialize, Serialize};
+
+use crate::core::compiler::Freshness;
+use crate::core::{Dependency, Package, PackageId, Source, SourceId};
+use crate::ops::{self, CompileFilter, CompileOptions};
+use crate::sources::PathSource;
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::{Config, ToSemver};
+use crate::util::{FileLock, Filesystem};
+
+/// On-disk tracking for which package installed which binary.
+///
+/// v1 is an older style, v2 is a new (experimental) style that tracks more
+/// information. The new style is only enabled with the `-Z install-upgrade`
+/// flag (which sets the `unstable_upgrade` flag). v1 is still considered the
+/// source of truth. When v2 is used, it will sync with any changes made to
+/// v1, and will continue to update v1.
+///
+/// This maintains a filesystem lock, preventing other instances of Cargo from
+/// modifying at the same time. Drop the value to unlock.
+///
+/// If/when v2 is stabilized, it is intended that v1 is retained for a while
+/// during a longish transition period, and then v1 can be removed.
+pub struct InstallTracker {
+ v1: CrateListingV1,
+ v2: CrateListingV2,
+ v1_lock: FileLock,
+ v2_lock: Option<FileLock>,
+ unstable_upgrade: bool,
+}
+
+/// Tracking information for the set of installed packages.
+///
+/// This v2 format is unstable and requires the `-Z unstable-upgrade` option
+/// to enable.
+#[derive(Default, Deserialize, Serialize)]
+struct CrateListingV2 {
+ installs: BTreeMap<PackageId, InstallInfo>,
+ /// Forwards compatibility.
+ #[serde(flatten)]
+ other: BTreeMap<String, serde_json::Value>,
+}
+
+/// Tracking information for the installation of a single package.
+///
+/// This tracks the settings that were used when the package was installed.
+/// Future attempts to install the same package will check these settings to
+/// determine if it needs to be rebuilt/reinstalled. If nothing has changed,
+/// then Cargo will inform the user that it is "up to date".
+///
+/// This is only used for the (unstable) v2 format.
+#[derive(Debug, Deserialize, Serialize)]
+struct InstallInfo {
+ /// Version requested via `--version`.
+ /// None if `--version` not specified.
Currently not used, possibly may be
+ /// used in the future.
+ version_req: Option<String>,
+ /// Set of binary names installed.
+ bins: BTreeSet<String>,
+ /// Set of features explicitly enabled.
+ features: BTreeSet<String>,
+ all_features: bool,
+ no_default_features: bool,
+ /// Either "debug" or "release".
+ profile: String,
+ /// The installation target.
+ /// Either the host or the value specified in `--target`.
+ /// None if unknown (when loading from v1).
+ target: Option<String>,
+ /// Output of `rustc -V`.
+ /// None if unknown (when loading from v1).
+ /// Currently not used, possibly may be used in the future.
+ rustc: Option<String>,
+ /// Forwards compatibility.
+ #[serde(flatten)]
+ other: BTreeMap<String, serde_json::Value>,
+}
+
+/// Tracking information for the set of installed packages.
+#[derive(Default, Deserialize, Serialize)]
+pub struct CrateListingV1 {
+ v1: BTreeMap<PackageId, BTreeSet<String>>,
+}
+
+impl InstallTracker {
+ /// Create an InstallTracker from information on disk.
+ pub fn load(config: &Config, root: &Filesystem) -> CargoResult<InstallTracker> {
+ let unstable_upgrade = config.cli_unstable().install_upgrade;
+ let v1_lock = root.open_rw(Path::new(".crates.toml"), config, "crate metadata")?;
+ let v2_lock = if unstable_upgrade {
+ Some(root.open_rw(Path::new(".crates2.json"), config, "crate metadata")?)
+ } else {
+ None
+ };
+
+ let v1 = (|| -> CargoResult<_> {
+ let mut contents = String::new();
+ v1_lock.file().read_to_string(&mut contents)?;
+ if contents.is_empty() {
+ Ok(CrateListingV1::default())
+ } else {
+ Ok(toml::from_str(&contents)
+ .chain_err(|| format_err!("invalid TOML found for metadata"))?)
+ }
+ })()
+ .chain_err(|| {
+ format_err!(
+ "failed to parse crate metadata at `{}`",
+ v1_lock.path().to_string_lossy()
+ )
+ })?;
+
+ let v2 = (|| -> CargoResult<_> {
+ match &v2_lock {
+ Some(lock) => {
+ let mut contents = String::new();
+ lock.file().read_to_string(&mut contents)?;
+ let mut v2 = if contents.is_empty() {
+ CrateListingV2::default()
+ } else {
+ serde_json::from_str(&contents)
+ .chain_err(|| format_err!("invalid JSON found for metadata"))?
+ };
+ v2.sync_v1(&v1)?;
+ Ok(v2)
+ }
+ None => Ok(CrateListingV2::default()),
+ }
+ })()
+ .chain_err(|| {
+ format_err!(
+ "failed to parse crate metadata at `{}`",
+ v2_lock.as_ref().unwrap().path().to_string_lossy()
+ )
+ })?;
+
+ Ok(InstallTracker {
+ v1,
+ v2,
+ v1_lock,
+ v2_lock,
+ unstable_upgrade,
+ })
+ }
+
+ /// Checks if the given package should be built, and checks if executables
+ /// already exist in the destination directory.
+ ///
+ /// Returns a tuple `(freshness, map)`. `freshness` indicates if the
+ /// package should be built (`Dirty`) or if it is already up-to-date
+ /// (`Fresh`) and should be skipped. The map maps binary names to the
+ /// PackageId that installed it (which is None if not known).
+ ///
+ /// If there are no duplicates, then it will be considered `Dirty` (i.e.,
+ /// it is OK to build/install).
+ ///
+ /// `force=true` will always be considered `Dirty` (i.e., it will always
+ /// be rebuilt/reinstalled).
+ ///
+ /// Returns an error if there is a duplicate and `--force` is not used.
+ pub fn check_upgrade(
+ &self,
+ dst: &Path,
+ pkg: &Package,
+ force: bool,
+ opts: &CompileOptions<'_>,
+ target: &str,
+ _rustc: &str,
+ ) -> CargoResult<(Freshness, BTreeMap<String, Option<PackageId>>)> {
+ let exes = exe_names(pkg, &opts.filter);
+ // Check if any tracked exes are already installed.
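Aside: `find_duplicates` (defined further down) is the heart of this check. A stand-alone restatement with plain strings standing in for `PackageId` values:

```rust
use std::collections::{BTreeMap, BTreeSet};

// Map each wanted executable to its current owner; None means a file
// exists in the destination but is untracked.
fn find_duplicates(
    owners: &BTreeMap<String, String>, // bin name -> installing package
    dst_contents: &BTreeSet<String>,   // files already in the bin dir
    exes: &BTreeSet<String>,           // executables about to be installed
) -> BTreeMap<String, Option<String>> {
    exes.iter()
        .filter(|name| dst_contents.contains(*name))
        .map(|name| (name.clone(), owners.get(name).cloned()))
        .collect()
}

fn main() {
    let mut owners = BTreeMap::new();
    owners.insert("rg".to_string(), "ripgrep 11.0.2".to_string());
    let on_disk: BTreeSet<String> =
        vec!["rg".to_string(), "stray".to_string()].into_iter().collect();
    let dupes = find_duplicates(&owners, &on_disk, &on_disk.clone());
    assert_eq!(dupes["rg"].as_deref(), Some("ripgrep 11.0.2"));
    assert_eq!(dupes["stray"], None);
}
```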
+ let duplicates = self.find_duplicates(dst, &exes); + if force || duplicates.is_empty() { + return Ok((Freshness::Dirty, duplicates)); + } + // Check if all duplicates come from packages of the same name. If + // there are duplicates from other packages, then --force will be + // required. + // + // There may be multiple matching duplicates if different versions of + // the same package installed different binaries. + // + // This does not check the source_id in order to allow the user to + // switch between different sources. For example, installing from git, + // and then switching to the official crates.io release or vice-versa. + // If the source_id were included, then the user would get possibly + // confusing errors like "package `foo 1.0.0` is already installed" + // and the change of source may not be obvious why it fails. + let matching_duplicates: Vec = duplicates + .values() + .filter_map(|v| match v { + Some(dupe_pkg_id) if dupe_pkg_id.name() == pkg.name() => Some(*dupe_pkg_id), + _ => None, + }) + .collect(); + + // If both sets are the same length, that means all duplicates come + // from packages with the same name. + if self.unstable_upgrade && matching_duplicates.len() == duplicates.len() { + // Determine if it is dirty or fresh. + let source_id = pkg.package_id().source_id(); + if source_id.is_path() { + // `cargo install --path ...` is always rebuilt. + return Ok((Freshness::Dirty, duplicates)); + } + let is_up_to_date = |dupe_pkg_id| { + let info = self + .v2 + .installs + .get(dupe_pkg_id) + .expect("dupes must be in sync"); + let precise_equal = if source_id.is_git() { + // Git sources must have the exact same hash to be + // considered "fresh". + dupe_pkg_id.source_id().precise() == source_id.precise() + } else { + true + }; + + dupe_pkg_id.version() == pkg.version() + && dupe_pkg_id.source_id() == source_id + && precise_equal + && info.is_up_to_date(opts, target, &exes) + }; + if matching_duplicates.iter().all(is_up_to_date) { + Ok((Freshness::Fresh, duplicates)) + } else { + Ok((Freshness::Dirty, duplicates)) + } + } else { + // Format the error message. + let mut msg = String::new(); + for (bin, p) in duplicates.iter() { + msg.push_str(&format!("binary `{}` already exists in destination", bin)); + if let Some(p) = p.as_ref() { + msg.push_str(&format!(" as part of `{}`\n", p)); + } else { + msg.push_str("\n"); + } + } + msg.push_str("Add --force to overwrite"); + bail!("{}", msg); + } + } + + /// Check if any executables are already installed. + /// + /// Returns a map of duplicates, the key is the executable name and the + /// value is the PackageId that is already installed. The PackageId is + /// None if it is an untracked executable. + fn find_duplicates( + &self, + dst: &Path, + exes: &BTreeSet, + ) -> BTreeMap> { + exes.iter() + .filter_map(|name| { + if !dst.join(&name).exists() { + None + } else if self.unstable_upgrade { + let p = self.v2.package_for_bin(name); + Some((name.clone(), p)) + } else { + let p = self.v1.package_for_bin(name); + Some((name.clone(), p)) + } + }) + .collect() + } + + /// Mark that a package was installed. + pub fn mark_installed( + &mut self, + package: &Package, + bins: &BTreeSet, + version_req: Option, + opts: &CompileOptions<'_>, + target: String, + rustc: String, + ) { + if self.unstable_upgrade { + self.v2 + .mark_installed(package, bins, version_req, opts, target, rustc) + } + self.v1.mark_installed(package, bins); + } + + /// Save tracking information to disk. 
+ pub fn save(&self) -> CargoResult<()> { + self.v1.save(&self.v1_lock).chain_err(|| { + format_err!( + "failed to write crate metadata at `{}`", + self.v1_lock.path().to_string_lossy() + ) + })?; + + if self.unstable_upgrade { + self.v2.save(self.v2_lock.as_ref().unwrap()).chain_err(|| { + format_err!( + "failed to write crate metadata at `{}`", + self.v2_lock.as_ref().unwrap().path().to_string_lossy() + ) + })?; + } + Ok(()) + } + + /// Iterator of all installed binaries. + /// Items are `(pkg_id, bins)` where `bins` is the set of binaries that + /// package installed. + pub fn all_installed_bins(&self) -> impl Iterator)> { + self.v1.v1.iter() + } + + /// Set of binaries installed by a particular package. + /// Returns None if the package is not installed. + pub fn installed_bins(&self, pkg_id: PackageId) -> Option<&BTreeSet> { + self.v1.v1.get(&pkg_id) + } + + /// Remove a package from the tracker. + pub fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { + self.v1.remove(pkg_id, bins); + if self.unstable_upgrade { + self.v2.remove(pkg_id, bins); + } + } +} + +impl CrateListingV1 { + fn package_for_bin(&self, bin_name: &str) -> Option { + self.v1 + .iter() + .find(|(_, bins)| bins.contains(bin_name)) + .map(|(pkg_id, _)| *pkg_id) + } + + fn mark_installed(&mut self, pkg: &Package, bins: &BTreeSet) { + // Remove bins from any other packages. + for other_bins in self.v1.values_mut() { + for bin in bins { + other_bins.remove(bin); + } + } + // Remove entries where `bins` is empty. + let to_remove = self + .v1 + .iter() + .filter_map(|(&p, set)| if set.is_empty() { Some(p) } else { None }) + .collect::>(); + for p in to_remove.iter() { + self.v1.remove(p); + } + // Add these bins. + self.v1 + .entry(pkg.package_id()) + .or_insert_with(BTreeSet::new) + .append(&mut bins.clone()); + } + + fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { + let mut installed = match self.v1.entry(pkg_id) { + btree_map::Entry::Occupied(e) => e, + btree_map::Entry::Vacant(..) => panic!("v1 unexpected missing `{}`", pkg_id), + }; + + for bin in bins { + installed.get_mut().remove(bin); + } + if installed.get().is_empty() { + installed.remove(); + } + } + + fn save(&self, lock: &FileLock) -> CargoResult<()> { + let mut file = lock.file(); + file.seek(SeekFrom::Start(0))?; + file.set_len(0)?; + let data = toml::to_string(self)?; + file.write_all(data.as_bytes())?; + Ok(()) + } +} + +impl CrateListingV2 { + /// Incorporate any changes from v1 into self. + /// This handles the initial upgrade to v2, *and* handles the case + /// where v2 is in use, and a v1 update is made, then v2 is used again. + /// i.e., `cargo +new install foo ; cargo +old install bar ; cargo +new install bar` + /// For now, v1 is the source of truth, so its values are trusted over v2. + fn sync_v1(&mut self, v1: &CrateListingV1) -> CargoResult<()> { + // Make the `bins` entries the same. + for (pkg_id, bins) in &v1.v1 { + self.installs + .entry(*pkg_id) + .and_modify(|info| info.bins = bins.clone()) + .or_insert_with(|| InstallInfo::from_v1(bins)); + } + // Remove any packages that aren't present in v1. 
+ let to_remove: Vec<_> = self + .installs + .keys() + .filter(|pkg_id| !v1.v1.contains_key(pkg_id)) + .cloned() + .collect(); + for pkg_id in to_remove { + self.installs.remove(&pkg_id); + } + Ok(()) + } + + fn package_for_bin(&self, bin_name: &str) -> Option { + self.installs + .iter() + .find(|(_, info)| info.bins.contains(bin_name)) + .map(|(pkg_id, _)| *pkg_id) + } + + fn mark_installed( + &mut self, + pkg: &Package, + bins: &BTreeSet, + version_req: Option, + opts: &CompileOptions<'_>, + target: String, + rustc: String, + ) { + // Remove bins from any other packages. + for info in &mut self.installs.values_mut() { + for bin in bins { + info.bins.remove(bin); + } + } + // Remove entries where `bins` is empty. + let to_remove = self + .installs + .iter() + .filter_map(|(&p, info)| if info.bins.is_empty() { Some(p) } else { None }) + .collect::>(); + for p in to_remove.iter() { + self.installs.remove(p); + } + // Add these bins. + if let Some(info) = self.installs.get_mut(&pkg.package_id()) { + info.bins.append(&mut bins.clone()); + info.version_req = version_req; + info.features = feature_set(&opts.features); + info.all_features = opts.all_features; + info.no_default_features = opts.no_default_features; + info.profile = profile_name(opts.build_config.release).to_string(); + info.target = Some(target); + info.rustc = Some(rustc); + } else { + self.installs.insert( + pkg.package_id(), + InstallInfo { + version_req, + bins: bins.clone(), + features: feature_set(&opts.features), + all_features: opts.all_features, + no_default_features: opts.no_default_features, + profile: profile_name(opts.build_config.release).to_string(), + target: Some(target), + rustc: Some(rustc), + other: BTreeMap::new(), + }, + ); + } + } + + fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { + let mut info_entry = match self.installs.entry(pkg_id) { + btree_map::Entry::Occupied(e) => e, + btree_map::Entry::Vacant(..) => panic!("v2 unexpected missing `{}`", pkg_id), + }; + + for bin in bins { + info_entry.get_mut().bins.remove(bin); + } + if info_entry.get().bins.is_empty() { + info_entry.remove(); + } + } + + fn save(&self, lock: &FileLock) -> CargoResult<()> { + let mut file = lock.file(); + file.seek(SeekFrom::Start(0))?; + file.set_len(0)?; + let data = serde_json::to_string(self)?; + file.write_all(data.as_bytes())?; + Ok(()) + } +} + +impl InstallInfo { + fn from_v1(set: &BTreeSet) -> InstallInfo { + InstallInfo { + version_req: None, + bins: set.clone(), + features: BTreeSet::new(), + all_features: false, + no_default_features: false, + profile: "release".to_string(), + target: None, + rustc: None, + other: BTreeMap::new(), + } + } + + /// Determine if this installation is "up to date", or if it needs to be reinstalled. + /// + /// This does not do Package/Source/Version checking. + fn is_up_to_date( + &self, + opts: &CompileOptions<'_>, + target: &str, + exes: &BTreeSet, + ) -> bool { + self.features == feature_set(&opts.features) + && self.all_features == opts.all_features + && self.no_default_features == opts.no_default_features + && self.profile == profile_name(opts.build_config.release) + && (self.target.is_none() || self.target.as_ref().map(|t| t.as_ref()) == Some(target)) + && &self.bins == exes + } +} + +/// Determines the root directory where installation is done. 
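+///
+/// Precedence, mirroring the chain below: the `--root` flag, then the
+/// `CARGO_INSTALL_ROOT` environment variable, then the `install.root`
+/// config value, and finally `$CARGO_HOME` itself.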
+pub fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult<Filesystem> {
+    let config_root = config.get_path("install.root")?;
+    Ok(flag
+        .map(PathBuf::from)
+        .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from))
+        .or_else(move || config_root.map(|v| v.val))
+        .map(Filesystem::new)
+        .unwrap_or_else(|| config.home().clone()))
+}
+
+/// Determines the `PathSource` from a `SourceId`.
+pub fn path_source<'a>(source_id: SourceId, config: &'a Config) -> CargoResult<PathSource<'a>> {
+    let path = source_id
+        .url()
+        .to_file_path()
+        .map_err(|()| format_err!("path sources must have a valid path"))?;
+    Ok(PathSource::new(&path, source_id, config))
+}
+
+/// Gets a Package based on command-line requirements.
+pub fn select_pkg<'a, T>(
+    mut source: T,
+    name: Option<&str>,
+    vers: Option<&str>,
+    config: &Config,
+    needs_update: bool,
+    list_all: &mut dyn FnMut(&mut T) -> CargoResult<Vec<Package>>,
+) -> CargoResult<Package>
+where
+    T: Source + 'a,
+{
+    // This operation may involve updating some sources or making a few
+    // queries, which may involve frobbing caches; as a result, make sure we
+    // synchronize with other global Cargos.
+    let _lock = config.acquire_package_cache_lock()?;
+
+    if needs_update {
+        source.update()?;
+    }
+
+    if let Some(name) = name {
+        let vers = if let Some(v) = vers {
+            // If the version begins with one of the characters <, >, =, ^, or
+            // ~, parse it as a version requirement; otherwise parse it as a
+            // specific version.
+            let first = v
+                .chars()
+                .nth(0)
+                .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?;
+
+            let is_req = "<>=^~".contains(first) || v.contains('*');
+            if is_req {
+                match v.parse::<VersionReq>() {
+                    Ok(v) => Some(v.to_string()),
+                    Err(_) => bail!(
+                        "the `--vers` provided, `{}`, is \
+                         not a valid semver version requirement\n\n\
+                         Please have a look at \
+                         https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html \
+                         for the correct format",
+                        v
+                    ),
+                }
+            } else {
+                match v.to_semver() {
+                    Ok(v) => Some(format!("={}", v)),
+                    Err(e) => {
+                        let mut msg = if config.cli_unstable().install_upgrade {
+                            format!(
+                                "the `--vers` provided, `{}`, is \
+                                 not a valid semver version: {}\n",
+                                v, e
+                            )
+                        } else {
+                            format!(
+                                "the `--vers` provided, `{}`, is \
+                                 not a valid semver version\n\n\
+                                 historically Cargo treated this \
+                                 as a semver version requirement \
+                                 accidentally\nand will continue \
+                                 to do so, but this behavior \
+                                 will be removed eventually",
+                                v
+                            )
+                        };
+
+                        // If it is not a valid version but it is a valid version
+                        // requirement, add a note to the warning.
+                        if v.parse::<VersionReq>().is_ok() {
+                            msg.push_str(&format!(
+                                "\nif you want to specify a semver range, \
+                                 add an explicit qualifier, like ^{}",
+                                v
+                            ));
+                        }
+                        if config.cli_unstable().install_upgrade {
+                            bail!(msg);
+                        } else {
+                            config.shell().warn(&msg)?;
+                        }
+                        Some(v.to_string())
+                    }
+                }
+            }
+        } else {
+            None
+        };
+        let vers = vers.as_ref().map(|s| &**s);
+        let vers_spec = if vers.is_none() && source.source_id().is_registry() {
+            // Avoid pre-release versions from crates.io
+            // unless explicitly asked for
+            Some("*")
+        } else {
+            vers
+        };
+        let dep = Dependency::parse_no_deprecated(name, vers_spec, source.source_id())?;
+        let deps = source.query_vec(&dep)?;
+        match deps.iter().map(|p| p.package_id()).max() {
+            Some(pkgid) => {
+                let pkg = Box::new(&mut source).download_now(pkgid, config)?;
+                Ok(pkg)
+            }
+            None => {
+                let vers_info = vers
+                    .map(|v| format!(" with version `{}`", v))
+                    .unwrap_or_default();
+                bail!(
+                    "could not find `{}` in {}{}",
+                    name,
+                    source.source_id(),
+                    vers_info
+                )
+            }
+        }
+    } else {
+        let candidates = list_all(&mut source)?;
+        let binaries = candidates
+            .iter()
+            .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0);
+        let examples = candidates
+            .iter()
+            .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0);
+        let pkg = match one(binaries, |v| multi_err("binaries", v))? {
+            Some(p) => p,
+            None => match one(examples, |v| multi_err("examples", v))? {
+                Some(p) => p,
+                None => bail!(
+                    "no packages found with binaries or \
+                     examples"
+                ),
+            },
+        };
+        return Ok(pkg.clone());
+
+        fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String {
+            pkgs.sort_unstable_by_key(|a| a.name());
+            format!(
+                "multiple packages with {} found: {}",
+                kind,
+                pkgs.iter()
+                    .map(|p| p.name().as_str())
+                    .collect::<Vec<_>>()
+                    .join(", ")
+            )
+        }
+    }
+}
+
+/// Get one element from the iterator.
+/// Returns `None` if the iterator is empty.
+/// Returns an error if the iterator yields more than one item.
+fn one<I, F>(mut i: I, f: F) -> CargoResult<Option<I::Item>>
+where
+    I: Iterator,
+    F: FnOnce(Vec<I::Item>) -> String,
+{
+    match (i.next(), i.next()) {
+        (Some(i1), Some(i2)) => {
+            let mut v = vec![i1, i2];
+            v.extend(i);
+            Err(format_err!("{}", f(v)))
+        }
+        (Some(i), None) => Ok(Some(i)),
+        (None, _) => Ok(None),
+    }
+}
+
+fn profile_name(release: bool) -> &'static str {
+    if release {
+        "release"
+    } else {
+        "dev"
+    }
+}
+
+/// Helper to convert a features `Vec<String>` to a `BTreeSet<String>`.
+fn feature_set(features: &[String]) -> BTreeSet<String> {
+    features.iter().cloned().collect()
+}
+
+/// Helper to get the executable names from a filter.
+pub fn exe_names(pkg: &Package, filter: &ops::CompileFilter) -> BTreeSet<String> {
+    let to_exe = |name| format!("{}{}", name, env::consts::EXE_SUFFIX);
+    match filter {
+        CompileFilter::Default { .. } => pkg
+            .targets()
+            .iter()
+            .filter(|t| t.is_bin())
+            .map(|t| to_exe(t.name()))
+            .collect(),
+        CompileFilter::Only {
+            ref bins,
+            ref examples,
+            ..
+        } => {
+            let all_bins: Vec<String> = bins.try_collect().unwrap_or_else(|| {
+                pkg.targets()
+                    .iter()
+                    .filter(|t| t.is_bin())
+                    .map(|t| t.name().to_string())
+                    .collect()
+            });
+            let all_examples: Vec<String> = examples.try_collect().unwrap_or_else(|| {
+                pkg.targets()
+                    .iter()
+                    .filter(|t| t.is_exe_example())
+                    .map(|t| t.name().to_string())
+                    .collect()
+            });
+
+            all_bins
+                .iter()
+                .chain(all_examples.iter())
+                .map(|name| to_exe(name))
+                .collect()
+        }
+    }
+}
diff --git a/src/cargo/ops/fix.rs b/src/cargo/ops/fix.rs
new file mode 100644
index 00000000000..1ac31974b72
--- /dev/null
+++ b/src/cargo/ops/fix.rs
@@ -0,0 +1,716 @@
+//! High-level overview of how `fix` works:
+//!
+//! The main goal is to run `cargo check` to get rustc to emit JSON
+//! diagnostics with suggested fixes that can be applied to the files on the
+//! filesystem, and validate that those changes didn't break anything.
+//!
+//! Cargo begins by launching a `LockServer` thread in the background to
+//! listen for network connections to coordinate locking when multiple targets
+//! are built simultaneously. It ensures each package has only one fix running
+//! at once.
+//!
+//! The `RustfixDiagnosticServer` is launched in a background thread (in
+//! `JobQueue`) to listen for network connections to coordinate displaying
+//! messages to the user on the console (so that multiple processes don't try
+//! to print at the same time).
+//!
+//! Cargo begins a normal `cargo check` operation with itself set as a proxy
+//! for rustc by setting `rustc_wrapper` in the build config. When
+//! cargo launches rustc to check a crate, it is actually launching itself.
+//! 
The `FIX_ENV` environment variable is set so that cargo knows it is in +//! fix-proxy-mode. +//! +//! Each proxied cargo-as-rustc detects it is in fix-proxy-mode (via `FIX_ENV` +//! environment variable in `main`) and does the following: +//! +//! - Acquire a lock from the `LockServer` from the master cargo process. +//! - Launches the real rustc (`rustfix_and_fix`), looking at the JSON output +//! for suggested fixes. +//! - Uses the `rustfix` crate to apply the suggestions to the files on the +//! file system. +//! - If rustfix fails to apply any suggestions (for example, they are +//! overlapping), but at least some suggestions succeeded, it will try the +//! previous two steps up to 4 times as long as some suggestions succeed. +//! - Assuming there's at least one suggestion applied, and the suggestions +//! applied cleanly, rustc is run again to verify the suggestions didn't +//! break anything. The change will be backed out if it fails (unless +//! `--broken-code` is used). +//! - If there are any warnings or errors, rustc will be run one last time to +//! show them to the user. + +use std::collections::{BTreeSet, HashMap, HashSet}; +use std::env; +use std::ffi::OsString; +use std::fs; +use std::path::{Path, PathBuf}; +use std::process::{self, Command, ExitStatus}; +use std::str; + +use failure::{Error, ResultExt}; +use log::{debug, trace, warn}; +use rustfix::diagnostics::Diagnostic; +use rustfix::{self, CodeFix}; + +use crate::core::Workspace; +use crate::ops::{self, CompileOptions}; +use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer}; +use crate::util::errors::CargoResult; +use crate::util::{self, paths}; +use crate::util::{existing_vcs_repo, LockServer, LockServerClient}; + +const FIX_ENV: &str = "__CARGO_FIX_PLZ"; +const BROKEN_CODE_ENV: &str = "__CARGO_FIX_BROKEN_CODE"; +const PREPARE_FOR_ENV: &str = "__CARGO_FIX_PREPARE_FOR"; +const EDITION_ENV: &str = "__CARGO_FIX_EDITION"; +const IDIOMS_ENV: &str = "__CARGO_FIX_IDIOMS"; +const CLIPPY_FIX_ARGS: &str = "__CARGO_FIX_CLIPPY_ARGS"; + +pub struct FixOptions<'a> { + pub edition: bool, + pub prepare_for: Option<&'a str>, + pub idioms: bool, + pub compile_opts: CompileOptions<'a>, + pub allow_dirty: bool, + pub allow_no_vcs: bool, + pub allow_staged: bool, + pub broken_code: bool, + pub clippy_args: Option>, +} + +pub fn fix(ws: &Workspace<'_>, opts: &mut FixOptions<'_>) -> CargoResult<()> { + check_version_control(opts)?; + + // Spin up our lock server, which our subprocesses will use to synchronize fixes. 
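+    // A sketch of the other half of this handshake (see
+    // `fix_maybe_exec_rustc` below): each proxied rustc invocation roughly
+    // does
+    //
+    //     if let Ok(addr) = env::var(FIX_ENV) {
+    //         let _lock = LockServerClient::lock(&addr.parse()?, manifest_dir)?;
+    //         // run rustc, feed its JSON suggestions to rustfix, re-verify ...
+    //     }
+    //
+    // before falling back to plain rustc behavior when FIX_ENV is unset.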
+ let lock_server = LockServer::new()?; + let mut wrapper = util::process(env::current_exe()?); + wrapper.env(FIX_ENV, lock_server.addr().to_string()); + let _started = lock_server.start()?; + + opts.compile_opts.build_config.force_rebuild = true; + + if opts.broken_code { + wrapper.env(BROKEN_CODE_ENV, "1"); + } + + if opts.edition { + wrapper.env(EDITION_ENV, "1"); + } else if let Some(edition) = opts.prepare_for { + wrapper.env(PREPARE_FOR_ENV, edition); + } + if opts.idioms { + wrapper.env(IDIOMS_ENV, "1"); + } + + if opts.clippy_args.is_some() { + if let Err(e) = util::process("clippy-driver").arg("-V").exec_with_output() { + eprintln!("Warning: clippy-driver not found: {:?}", e); + } + + let clippy_args = opts + .clippy_args + .as_ref() + .map_or_else(String::new, |args| serde_json::to_string(&args).unwrap()); + + wrapper.env(CLIPPY_FIX_ARGS, clippy_args); + } + + *opts + .compile_opts + .build_config + .rustfix_diagnostic_server + .borrow_mut() = Some(RustfixDiagnosticServer::new()?); + + if let Some(server) = opts + .compile_opts + .build_config + .rustfix_diagnostic_server + .borrow() + .as_ref() + { + server.configure(&mut wrapper); + } + + // primary crates are compiled using a cargo subprocess to do extra work of applying fixes and + // repeating build until there are no more changes to be applied + opts.compile_opts.build_config.primary_unit_rustc = Some(wrapper); + + ops::compile(ws, &opts.compile_opts)?; + Ok(()) +} + +fn check_version_control(opts: &FixOptions<'_>) -> CargoResult<()> { + if opts.allow_no_vcs { + return Ok(()); + } + let config = opts.compile_opts.config; + if !existing_vcs_repo(config.cwd(), config.cwd()) { + failure::bail!( + "no VCS found for this package and `cargo fix` can potentially \ + perform destructive changes; if you'd like to suppress this \ + error pass `--allow-no-vcs`" + ) + } + + if opts.allow_dirty && opts.allow_staged { + return Ok(()); + } + + let mut dirty_files = Vec::new(); + let mut staged_files = Vec::new(); + if let Ok(repo) = git2::Repository::discover(config.cwd()) { + let mut repo_opts = git2::StatusOptions::new(); + repo_opts.include_ignored(false); + for status in repo.statuses(Some(&mut repo_opts))?.iter() { + if let Some(path) = status.path() { + match status.status() { + git2::Status::CURRENT => (), + git2::Status::INDEX_NEW + | git2::Status::INDEX_MODIFIED + | git2::Status::INDEX_DELETED + | git2::Status::INDEX_RENAMED + | git2::Status::INDEX_TYPECHANGE => { + if !opts.allow_staged { + staged_files.push(path.to_string()) + } + } + _ => { + if !opts.allow_dirty { + dirty_files.push(path.to_string()) + } + } + }; + } + } + } + + if dirty_files.is_empty() && staged_files.is_empty() { + return Ok(()); + } + + let mut files_list = String::new(); + for file in dirty_files { + files_list.push_str(" * "); + files_list.push_str(&file); + files_list.push_str(" (dirty)\n"); + } + for file in staged_files { + files_list.push_str(" * "); + files_list.push_str(&file); + files_list.push_str(" (staged)\n"); + } + + failure::bail!( + "the working directory of this package has uncommitted changes, and \ + `cargo fix` can potentially perform destructive changes; if you'd \ + like to suppress this error pass `--allow-dirty`, `--allow-staged`, \ + or commit the changes to these files:\n\ + \n\ + {}\n\ + ", + files_list + ); +} + +pub fn fix_maybe_exec_rustc() -> CargoResult { + let lock_addr = match env::var(FIX_ENV) { + Ok(s) => s, + Err(_) => return Ok(false), + }; + + let args = FixArgs::get(); + trace!("cargo-fix as rustc got file {:?}", 
args.file);
+    let rustc = args.rustc.as_ref().expect("fix wrapper rustc was not set");
+
+    let mut fixes = FixedCrate::default();
+    if let Some(path) = &args.file {
+        trace!("start rustfixing {:?}", path);
+        fixes = rustfix_crate(&lock_addr, rustc.as_ref(), path, &args)?;
+    }
+
+    // Ok now we have our final goal of testing out the changes that we applied.
+    // If these changes went awry and actually started to cause the crate to
+    // *stop* compiling then we want to back them out and continue to print
+    // warnings to the user.
+    //
+    // If we didn't actually make any changes then we can immediately execute the
+    // new rustc, and otherwise we capture the output to hide it in the scenario
+    // that we have to back it all out.
+    if !fixes.files.is_empty() {
+        let mut cmd = Command::new(&rustc);
+        args.apply(&mut cmd);
+        cmd.arg("--error-format=json");
+        let output = cmd.output().context("failed to spawn rustc")?;
+
+        if output.status.success() {
+            for (path, file) in fixes.files.iter() {
+                Message::Fixing {
+                    file: path.clone(),
+                    fixes: file.fixes_applied,
+                }
+                .post()?;
+            }
+        }
+
+        // If we succeeded then we'll want to commit to the changes we made, if
+        // any. If stderr is empty then there's no need for the final exec at
+        // the end, we just bail out here.
+        if output.status.success() && output.stderr.is_empty() {
+            return Ok(true);
+        }
+
+        // Otherwise, if our rustc just failed, then that means that we broke the
+        // user's code with our changes. Back out everything and fall through
+        // below to recompile again.
+        if !output.status.success() {
+            if env::var_os(BROKEN_CODE_ENV).is_none() {
+                for (path, file) in fixes.files.iter() {
+                    fs::write(path, &file.original_code)
+                        .with_context(|_| format!("failed to write file `{}`", path))?;
+                }
+            }
+            log_failed_fix(&output.stderr)?;
+        }
+    }
+
+    // This final fall-through handles multiple cases:
+    // - If the fix failed, show the original warnings and suggestions.
+    // - If `--broken-code`, show the error messages.
+    // - If the fix succeeded, show any remaining warnings.
+    let mut cmd = Command::new(&rustc);
+    args.apply(&mut cmd);
+    exit_with(cmd.status().context("failed to spawn rustc")?);
+}
+
+#[derive(Default)]
+struct FixedCrate {
+    files: HashMap<String, FixedFile>,
+}
+
+struct FixedFile {
+    errors_applying_fixes: Vec<String>,
+    fixes_applied: u32,
+    original_code: String,
+}
+
+fn rustfix_crate(
+    lock_addr: &str,
+    rustc: &Path,
+    filename: &Path,
+    args: &FixArgs,
+) -> Result<FixedCrate, Error> {
+    args.verify_not_preparing_for_enabled_edition()?;
+
+    // First up, we want to make sure that each crate is only checked by one
+    // process at a time. If two invocations concurrently check a crate then
+    // it's likely to corrupt it.
+    //
+    // We currently do this by assigning the name on our lock to the manifest
+    // directory.
+    let dir = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR is missing?");
+    let _lock = LockServerClient::lock(&lock_addr.parse()?, dir)?;
+
+    // Next up, this is a bit suspicious, but we *iteratively* execute rustc and
+    // collect suggestions to feed to rustfix. Once we hit our limit of times to
+    // execute rustc or we appear to be reaching a fixed point we stop running
+    // rustc.
+    //
+    // This is currently done to handle code like:
+    //
+    //      ::foo::<::Bar>();
+    //
+    // where there are two fixes to happen here: `crate::foo::<crate::Bar>()`.
+    // The spans for these two suggestions are overlapping and it's difficult in
+    // the compiler to **not** have overlapping spans here.
As a result, a naive + // implementation would feed the two compiler suggestions for the above fix + // into `rustfix`, but one would be rejected because it overlaps with the + // other. + // + // In this case though, both suggestions are valid and can be automatically + // applied! To handle this case we execute rustc multiple times, collecting + // fixes each time we do so. Along the way we discard any suggestions that + // failed to apply, assuming that they can be fixed the next time we run + // rustc. + // + // Naturally, we want a few protections in place here though to avoid looping + // forever or otherwise losing data. To that end we have a few termination + // conditions: + // + // * Do this whole process a fixed number of times. In theory we probably + // need an infinite number of times to apply fixes, but we're not gonna + // sit around waiting for that. + // * If it looks like a fix genuinely can't be applied we need to bail out. + // Detect this when a fix fails to get applied *and* no suggestions + // successfully applied to the same file. In that case looks like we + // definitely can't make progress, so bail out. + let mut fixes = FixedCrate::default(); + let mut last_fix_counts = HashMap::new(); + let iterations = env::var("CARGO_FIX_MAX_RETRIES") + .ok() + .and_then(|n| n.parse().ok()) + .unwrap_or(4); + for _ in 0..iterations { + last_fix_counts.clear(); + for (path, file) in fixes.files.iter_mut() { + last_fix_counts.insert(path.clone(), file.fixes_applied); + // We'll generate new errors below. + file.errors_applying_fixes.clear(); + } + rustfix_and_fix(&mut fixes, rustc, filename, args)?; + let mut progress_yet_to_be_made = false; + for (path, file) in fixes.files.iter_mut() { + if file.errors_applying_fixes.is_empty() { + continue; + } + // If anything was successfully fixed *and* there's at least one + // error, then assume the error was spurious and we'll try again on + // the next iteration. + if file.fixes_applied != *last_fix_counts.get(path).unwrap_or(&0) { + progress_yet_to_be_made = true; + } + } + if !progress_yet_to_be_made { + break; + } + } + + // Any errors still remaining at this point need to be reported as probably + // bugs in Cargo and/or rustfix. + for (path, file) in fixes.files.iter_mut() { + for error in file.errors_applying_fixes.drain(..) { + Message::ReplaceFailed { + file: path.clone(), + message: error, + } + .post()?; + } + } + + Ok(fixes) +} + +/// Executes `rustc` to apply one round of suggestions to the crate in question. +/// +/// This will fill in the `fixes` map with original code, suggestions applied, +/// and any errors encountered while fixing files. +fn rustfix_and_fix( + fixes: &mut FixedCrate, + rustc: &Path, + filename: &Path, + args: &FixArgs, +) -> Result<(), Error> { + // If not empty, filter by these lints. + // TODO: implement a way to specify this. + let only = HashSet::new(); + + let mut cmd = Command::new(rustc); + cmd.arg("--error-format=json"); + args.apply(&mut cmd); + let output = cmd + .output() + .with_context(|_| format!("failed to execute `{}`", rustc.display()))?; + + // If rustc didn't succeed for whatever reasons then we're very likely to be + // looking at otherwise broken code. Let's not make things accidentally + // worse by applying fixes where a bug could cause *more* broken code. + // Instead, punt upwards which will reexec rustc over the original code, + // displaying pretty versions of the diagnostics we just read out. 
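+    //
+    // For orientation, each non-empty stderr line parsed below is a rustc
+    // JSON diagnostic, roughly of this shape (abbreviated and illustrative,
+    // not the full schema):
+    //
+    //     {"message":"unused variable: `x`","level":"warning",
+    //      "spans":[{"file_name":"src/lib.rs", ...}],
+    //      "children":[...],"rendered":"warning: unused variable ..."}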
+    if !output.status.success() && env::var_os(BROKEN_CODE_ENV).is_none() {
+        debug!(
+            "rustfixing `{:?}` failed, rustc exited with {:?}",
+            filename,
+            output.status.code()
+        );
+        return Ok(());
+    }
+
+    let fix_mode = env::var_os("__CARGO_FIX_YOLO")
+        .map(|_| rustfix::Filter::Everything)
+        .unwrap_or(rustfix::Filter::MachineApplicableOnly);
+
+    // Sift through the output of the compiler to look for JSON messages
+    // indicating fixes that we can apply.
+    let stderr = str::from_utf8(&output.stderr).context("failed to parse rustc stderr as UTF-8")?;
+
+    let suggestions = stderr
+        .lines()
+        .filter(|x| !x.is_empty())
+        .inspect(|y| trace!("line: {}", y))
+        // Parse each line of stderr, ignoring errors, as they may not all be JSON.
+        .filter_map(|line| serde_json::from_str::<Diagnostic>(line).ok())
+        // From each diagnostic, try to extract suggestions from rustc.
+        .filter_map(|diag| rustfix::collect_suggestions(&diag, &only, fix_mode));
+
+    // Collect suggestions by file so we can apply them one at a time later.
+    let mut file_map = HashMap::new();
+    let mut num_suggestion = 0;
+    for suggestion in suggestions {
+        trace!("suggestion");
+        // Make sure we've got a file associated with this suggestion and all
+        // snippets point to the same file. Right now it's not clear what
+        // we would do with multiple files.
+        let file_names = suggestion
+            .solutions
+            .iter()
+            .flat_map(|s| s.replacements.iter())
+            .map(|r| &r.snippet.file_name);
+
+        let file_name = if let Some(file_name) = file_names.clone().next() {
+            file_name.clone()
+        } else {
+            trace!("rejecting as it has no solutions {:?}", suggestion);
+            continue;
+        };
+
+        if !file_names.clone().all(|f| f == &file_name) {
+            trace!("rejecting as it changes multiple files: {:?}", suggestion);
+            continue;
+        }
+
+        file_map
+            .entry(file_name)
+            .or_insert_with(Vec::new)
+            .push(suggestion);
+        num_suggestion += 1;
+    }
+
+    debug!(
+        "collected {} suggestions for `{}`",
+        num_suggestion,
+        filename.display(),
+    );
+
+    for (file, suggestions) in file_map {
+        // Attempt to read the source code for this file. If this fails then
+        // that'd be pretty surprising, so log a message and otherwise keep
+        // going.
+        let code = match paths::read(file.as_ref()) {
+            Ok(s) => s,
+            Err(e) => {
+                warn!("failed to read `{}`: {}", file, e);
+                continue;
+            }
+        };
+        let num_suggestions = suggestions.len();
+        debug!("applying {} fixes to {}", num_suggestions, file);
+
+        // If this file doesn't already exist then we just read the original
+        // code, so save it. If the file already exists then the original code
+        // doesn't need to be updated as we've just read an interim state with
+        // some fixes but perhaps not all.
+        let fixed_file = fixes
+            .files
+            .entry(file.clone())
+            .or_insert_with(|| FixedFile {
+                errors_applying_fixes: Vec::new(),
+                fixes_applied: 0,
+                original_code: code.clone(),
+            });
+        let mut fixed = CodeFix::new(&code);
+
+        // As mentioned above in `rustfix_crate`, we don't immediately warn
+        // about suggestions that fail to apply here, and instead we save them
+        // off for later processing.
+        for suggestion in suggestions.iter().rev() {
+            match fixed.apply(suggestion) {
+                Ok(()) => fixed_file.fixes_applied += 1,
+                Err(e) => fixed_file.errors_applying_fixes.push(e.to_string()),
+            }
+        }
+        let new_code = fixed.finish()?;
+        fs::write(&file, new_code).with_context(|_| format!("failed to write file `{}`", file))?;
+    }
+
+    Ok(())
+}
+
+fn exit_with(status: ExitStatus) -> !
{ + #[cfg(unix)] + { + use std::os::unix::prelude::*; + if let Some(signal) = status.signal() { + eprintln!("child failed with signal `{}`", signal); + process::exit(2); + } + } + process::exit(status.code().unwrap_or(3)); +} + +fn log_failed_fix(stderr: &[u8]) -> Result<(), Error> { + let stderr = str::from_utf8(stderr).context("failed to parse rustc stderr as utf-8")?; + + let diagnostics = stderr + .lines() + .filter(|x| !x.is_empty()) + .filter_map(|line| serde_json::from_str::(line).ok()); + let mut files = BTreeSet::new(); + let mut errors = Vec::new(); + for diagnostic in diagnostics { + errors.push(diagnostic.rendered.unwrap_or(diagnostic.message)); + for span in diagnostic.spans.into_iter() { + files.insert(span.file_name); + } + } + let mut krate = None; + let mut prev_dash_dash_krate_name = false; + for arg in env::args() { + if prev_dash_dash_krate_name { + krate = Some(arg.clone()); + } + + if arg == "--crate-name" { + prev_dash_dash_krate_name = true; + } else { + prev_dash_dash_krate_name = false; + } + } + + let files = files.into_iter().collect(); + Message::FixFailed { + files, + krate, + errors, + } + .post()?; + + Ok(()) +} + +#[derive(Default)] +struct FixArgs { + file: Option, + prepare_for_edition: PrepareFor, + idioms: bool, + enabled_edition: Option, + other: Vec, + rustc: Option, + clippy_args: Vec, +} + +enum PrepareFor { + Next, + Edition(String), + None, +} + +impl Default for PrepareFor { + fn default() -> PrepareFor { + PrepareFor::None + } +} + +impl FixArgs { + fn get() -> FixArgs { + let mut ret = FixArgs::default(); + + if let Ok(clippy_args) = env::var(CLIPPY_FIX_ARGS) { + ret.clippy_args = serde_json::from_str(&clippy_args).unwrap(); + ret.rustc = Some(util::config::clippy_driver()); + } else { + ret.rustc = env::args_os().nth(1).map(PathBuf::from); + } + + for arg in env::args_os().skip(2) { + let path = PathBuf::from(arg); + if path.extension().and_then(|s| s.to_str()) == Some("rs") && path.exists() { + ret.file = Some(path); + continue; + } + if let Some(s) = path.to_str() { + let prefix = "--edition="; + if s.starts_with(prefix) { + ret.enabled_edition = Some(s[prefix.len()..].to_string()); + continue; + } + if s.starts_with("--error-format=") || s.starts_with("--json-rendered=") { + // Cargo may add error-format in some cases, but `cargo + // fix` wants to add its own. + continue; + } + } + ret.other.push(path.into()); + } + if let Ok(s) = env::var(PREPARE_FOR_ENV) { + ret.prepare_for_edition = PrepareFor::Edition(s); + } else if env::var(EDITION_ENV).is_ok() { + ret.prepare_for_edition = PrepareFor::Next; + } + + ret.idioms = env::var(IDIOMS_ENV).is_ok(); + ret + } + + fn apply(&self, cmd: &mut Command) { + if let Some(path) = &self.file { + cmd.arg(path); + } + + if !self.clippy_args.is_empty() { + cmd.args(&self.clippy_args); + } + + cmd.args(&self.other).arg("--cap-lints=warn"); + if let Some(edition) = &self.enabled_edition { + cmd.arg("--edition").arg(edition); + if self.idioms && edition == "2018" { + cmd.arg("-Wrust-2018-idioms"); + } + } + + if let Some(edition) = self.prepare_for_edition_resolve() { + cmd.arg("-W").arg(format!("rust-{}-compatibility", edition)); + } + } + + /// Verifies that we're not both preparing for an enabled edition and enabling + /// the edition. + /// + /// This indicates that `cargo fix --prepare-for` is being executed out of + /// order with enabling the edition itself, meaning that we wouldn't + /// actually be able to fix anything! 
If it looks like this is happening + /// then yield an error to the user, indicating that this is happening. + fn verify_not_preparing_for_enabled_edition(&self) -> CargoResult<()> { + let edition = match self.prepare_for_edition_resolve() { + Some(s) => s, + None => return Ok(()), + }; + let enabled = match &self.enabled_edition { + Some(s) => s, + None => return Ok(()), + }; + if edition != enabled { + return Ok(()); + } + let path = match &self.file { + Some(s) => s, + None => return Ok(()), + }; + + Message::EditionAlreadyEnabled { + file: path.display().to_string(), + edition: edition.to_string(), + } + .post()?; + + process::exit(1); + } + + fn prepare_for_edition_resolve(&self) -> Option<&str> { + match &self.prepare_for_edition { + PrepareFor::Edition(s) => Some(s), + PrepareFor::Next => Some(self.next_edition()), + PrepareFor::None => None, + } + } + + fn next_edition(&self) -> &str { + match self.enabled_edition.as_ref().map(|s| &**s) { + // 2015 -> 2018, + None | Some("2015") => "2018", + + // This'll probably be wrong in 2020, but that's future Cargo's + // problem. Eventually though we'll just add more editions here as + // necessary. + _ => "2018", + } + } +} diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs index 12e08c0715c..c0a27a9965b 100644 --- a/src/cargo/ops/lockfile.rs +++ b/src/cargo/ops/lockfile.rs @@ -1,87 +1,198 @@ -use std::fs::File; use std::io::prelude::*; -use std::path::Path; -use rustc_serialize::{Encodable, Decodable}; -use toml::{self, Encoder, Value}; +use toml; -use core::{Resolve, resolver, Package, SourceId}; -use util::{CargoResult, ChainError, human}; -use util::toml as cargo_toml; +use crate::core::{resolver, Resolve, Workspace}; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::toml as cargo_toml; +use crate::util::Filesystem; -pub fn load_pkg_lockfile(pkg: &Package) -> CargoResult> { - let lockfile = pkg.root().join("Cargo.lock"); - let source_id = pkg.package_id().source_id(); - load_lockfile(&lockfile, source_id).chain_error(|| { - human(format!("failed to parse lock file at: {}", lockfile.display())) - }) -} +pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult> { + if !ws.root().join("Cargo.lock").exists() { + return Ok(None); + } -pub fn load_lockfile(path: &Path, sid: &SourceId) -> CargoResult> { - // If there is no lockfile, return none. - let mut f = match File::open(path) { - Ok(f) => f, - Err(_) => return Ok(None) - }; + let root = Filesystem::new(ws.root().to_path_buf()); + let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?; let mut s = String::new(); - try!(f.read_to_string(&mut s)); + f.read_to_string(&mut s) + .chain_err(|| format!("failed to read file: {}", f.path().display()))?; + + let resolve = (|| -> CargoResult> { + let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; + let v: resolver::EncodableResolve = resolve.try_into()?; + Ok(Some(v.into_resolve(ws)?)) + })() + .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?; + Ok(resolve) +} - let table = toml::Value::Table(try!(cargo_toml::parse(&s, path))); - let mut d = toml::Decoder::new(table); - let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d)); - Ok(Some(try!(v.to_resolve(sid)))) +/// Generate a toml String of Cargo.lock from a Resolve. 
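+///
+/// The output uses the standard lock file layout produced by `emit_package`
+/// below, e.g. (an illustrative entry):
+///
+/// ```text
+/// [[package]]
+/// name = "foo"
+/// version = "0.1.0"
+/// source = "registry+https://github.com/rust-lang/crates.io-index"
+/// ```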
+pub fn resolve_to_string(ws: &Workspace<'_>, resolve: &Resolve) -> CargoResult<String> {
+    let (_orig, out, _ws_root) = resolve_to_string_orig(ws, resolve)?;
+    Ok(out)
 }
 
-pub fn write_pkg_lockfile(pkg: &Package, resolve: &Resolve) -> CargoResult<()> {
-    let loc = pkg.root().join("Cargo.lock");
-    write_lockfile(&loc, resolve)
+pub fn write_pkg_lockfile(ws: &Workspace<'_>, resolve: &Resolve) -> CargoResult<()> {
+    let (orig, out, ws_root) = resolve_to_string_orig(ws, resolve)?;
+
+    // If the lock file contents haven't changed, don't rewrite it. This is
+    // helpful on read-only filesystems.
+    if let Some(orig) = orig {
+        if are_equal_lockfiles(orig, &out, ws) {
+            return Ok(());
+        }
+    }
+
+    if !ws.config().lock_update_allowed() {
+        if ws.config().offline() {
+            failure::bail!("can't update in offline mode");
+        }
+
+        let flag = if ws.config().network_allowed() {
+            "--locked"
+        } else {
+            "--frozen"
+        };
+        failure::bail!(
+            "the lock file {} needs to be updated but {} was passed to \
+             prevent this",
+            ws.root().to_path_buf().join("Cargo.lock").display(),
+            flag
+        );
+    }
+
+    // Ok, if that didn't work, just write it out.
+    ws_root
+        .open_rw("Cargo.lock", ws.config(), "Cargo.lock file")
+        .and_then(|mut f| {
+            f.file().set_len(0)?;
+            f.write_all(out.as_bytes())?;
+            Ok(())
+        })
+        .chain_err(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?;
+    Ok(())
 }
 
-pub fn write_lockfile(dst: &Path, resolve: &Resolve) -> CargoResult<()> {
-    let mut e = Encoder::new();
-    resolve.encode(&mut e).unwrap();
+fn resolve_to_string_orig(
+    ws: &Workspace<'_>,
+    resolve: &Resolve,
+) -> CargoResult<(Option<String>, String, Filesystem)> {
+    // Load the original lock file if it exists.
+    let ws_root = Filesystem::new(ws.root().to_path_buf());
+    let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file");
+    let orig = orig.and_then(|mut f| {
+        let mut s = String::new();
+        f.read_to_string(&mut s)?;
+        Ok(s)
+    });
+
+    let toml = toml::Value::try_from(resolve).unwrap();
 
     let mut out = String::new();
 
-    // Note that we do not use e.toml.to_string() as we want to control the
-    // exact format the toml is in to ensure pretty diffs between updates to the
-    // lockfile.
-    let root = e.toml.get(&"root".to_string()).unwrap();
-
-    out.push_str("[root]\n");
-    emit_package(root.as_table().unwrap(), &mut out);
+    // At the start of the file we notify the reader that the file is generated.
+    // Specifically, Phabricator ignores files containing "@generated", so we use that.
+ let marker_line = "# This file is automatically @generated by Cargo."; + let extra_line = "# It is not intended for manual editing."; + out.push_str(marker_line); + out.push('\n'); + out.push_str(extra_line); + out.push('\n'); + // and preserve any other top comments + if let Ok(orig) = &orig { + let mut comments = orig.lines().take_while(|line| line.starts_with('#')); + if let Some(first) = comments.next() { + if first != marker_line { + out.push_str(first); + out.push('\n'); + } + if let Some(second) = comments.next() { + if second != extra_line { + out.push_str(second); + out.push('\n'); + } + for line in comments { + out.push_str(line); + out.push('\n'); + } + } + } + } - let deps = e.toml.get(&"package".to_string()).unwrap().as_slice().unwrap(); - for dep in deps.iter() { + let deps = toml["package"].as_array().unwrap(); + for (i, dep) in deps.iter().enumerate() { + if i > 0 { + out.push_str("\n"); + } let dep = dep.as_table().unwrap(); out.push_str("[[package]]\n"); emit_package(dep, &mut out); } - match e.toml.get(&"metadata".to_string()) { - Some(metadata) => { - out.push_str("[metadata]\n"); - out.push_str(&metadata.to_string()); + if let Some(patch) = toml.get("patch") { + let list = patch["unused"].as_array().unwrap(); + for entry in list { + out.push_str("\n[[patch.unused]]\n"); + emit_package(entry.as_table().unwrap(), &mut out); } - None => {} } - try!(try!(File::create(dst)).write_all(out.as_bytes())); - Ok(()) + if let Some(meta) = toml.get("metadata") { + out.push_str("\n"); + out.push_str("[metadata]\n"); + out.push_str(&meta.to_string()); + } + + Ok((orig.ok(), out, ws_root)) +} + +fn are_equal_lockfiles(mut orig: String, current: &str, ws: &Workspace<'_>) -> bool { + if has_crlf_line_endings(&orig) { + orig = orig.replace("\r\n", "\n"); + } + + // If we want to try and avoid updating the lock file, parse both and + // compare them; since this is somewhat expensive, don't do it in the + // common case where we can update lock files. + if !ws.config().lock_update_allowed() { + let res: CargoResult = (|| { + let old: resolver::EncodableResolve = toml::from_str(&orig)?; + let new: resolver::EncodableResolve = toml::from_str(current)?; + Ok(old.into_resolve(ws)? == new.into_resolve(ws)?) + })(); + if let Ok(true) = res { + return true; + } + } + + current == orig +} + +fn has_crlf_line_endings(s: &str) -> bool { + // Only check the first line. 
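+    // (For example, a lock file written with Windows line endings starts
+    // "# This file is automatically @generated by Cargo.\r\n"; detecting the
+    // `\r` here lets `are_equal_lockfiles` normalize the text before
+    // comparing.)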
+ if let Some(lf) = s.find('\n') { + s[..lf].ends_with('\r') + } else { + false + } } -fn emit_package(dep: &toml::Table, out: &mut String) { - out.push_str(&format!("name = {}\n", lookup(dep, "name"))); - out.push_str(&format!("version = {}\n", lookup(dep, "version"))); +fn emit_package(dep: &toml::value::Table, out: &mut String) { + out.push_str(&format!("name = {}\n", &dep["name"])); + out.push_str(&format!("version = {}\n", &dep["version"])); if dep.contains_key("source") { - out.push_str(&format!("source = {}\n", lookup(dep, "source"))); + out.push_str(&format!("source = {}\n", &dep["source"])); + } + if dep.contains_key("checksum") { + out.push_str(&format!("checksum = {}\n", &dep["checksum"])); } - if let Some(ref s) = dep.get("dependencies") { - let slice = Value::as_slice(*s).unwrap(); + if let Some(s) = dep.get("dependencies") { + let slice = s.as_array().unwrap(); if !slice.is_empty() { out.push_str("dependencies = [\n"); @@ -92,10 +203,7 @@ fn emit_package(dep: &toml::Table, out: &mut String) { out.push_str("]\n"); } - out.push_str("\n"); + } else if dep.contains_key("replace") { + out.push_str(&format!("replace = {}\n", &dep["replace"])); } } - -fn lookup<'a>(table: &'a toml::Table, key: &str) -> &'a toml::Value { - table.get(key).expect(&format!("didn't find {}", key)) -} diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs index 545187a98df..304e4d3c3f1 100644 --- a/src/cargo/ops/mod.rs +++ b/src/cargo/ops/mod.rs @@ -1,41 +1,49 @@ pub use self::cargo_clean::{clean, CleanOptions}; -pub use self::cargo_compile::{compile, compile_pkg, CompileOptions}; -pub use self::cargo_compile::{CompileFilter, CompileMode}; -pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages}; -pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind}; -pub use self::cargo_rustc::{Context, LayoutProxy}; -pub use self::cargo_rustc::Platform; -pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig}; -pub use self::cargo_rustc::{CommandType, CommandPrototype, ExecEngine, ProcessEngine}; -pub use self::cargo_run::run; -pub use self::cargo_new::{new, NewOptions, VersionControl}; +pub use self::cargo_compile::{compile, compile_with_exec, compile_ws, CompileOptions}; +pub use self::cargo_compile::{CompileFilter, FilterRule, LibRule, Packages}; pub use self::cargo_doc::{doc, DocOptions}; -pub use self::cargo_generate_lockfile::{generate_lockfile}; -pub use self::cargo_generate_lockfile::{update_lockfile}; +pub use self::cargo_fetch::{fetch, FetchOptions}; +pub use self::cargo_generate_lockfile::generate_lockfile; +pub use self::cargo_generate_lockfile::update_lockfile; pub use self::cargo_generate_lockfile::UpdateOptions; -pub use self::lockfile::{load_lockfile, load_pkg_lockfile}; -pub use self::lockfile::{write_lockfile, write_pkg_lockfile}; -pub use self::cargo_test::{run_tests, run_benches, TestOptions}; -pub use self::cargo_package::package; -pub use self::registry::{publish, registry_configuration, RegistryConfig}; -pub use self::registry::{registry_login, search, http_proxy_exists, http_handle}; -pub use self::registry::{modify_owners, yank, OwnersOptions}; -pub use self::cargo_fetch::{fetch}; +pub use self::cargo_install::{install, install_list}; +pub use self::cargo_new::{init, new, NewOptions, VersionControl}; +pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions}; +pub use self::cargo_package::{package, PackageOpts}; pub use self::cargo_pkgid::pkgid; -pub use self::resolve::{resolve_pkg, resolve_with_previous}; 
+pub use self::cargo_read_manifest::{read_package, read_packages}; +pub use self::cargo_run::run; +pub use self::cargo_test::{run_benches, run_tests, TestOptions}; +pub use self::cargo_uninstall::uninstall; +pub use self::fix::{fix, fix_maybe_exec_rustc, FixOptions}; +pub use self::lockfile::{load_pkg_lockfile, resolve_to_string, write_pkg_lockfile}; +pub use self::registry::HttpTimeout; +pub use self::registry::{configure_http_handle, http_handle_and_timeout}; +pub use self::registry::{http_handle, needs_custom_http_transport, registry_login, search}; +pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts}; +pub use self::registry::{publish, registry_configuration, RegistryConfig}; +pub use self::resolve::{ + add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_with_opts, +}; +pub use self::vendor::{vendor, VendorOptions}; mod cargo_clean; mod cargo_compile; mod cargo_doc; mod cargo_fetch; mod cargo_generate_lockfile; +mod cargo_install; mod cargo_new; +mod cargo_output_metadata; mod cargo_package; mod cargo_pkgid; mod cargo_read_manifest; mod cargo_run; -mod cargo_rustc; mod cargo_test; +mod cargo_uninstall; +mod common_for_install_and_uninstall; +mod fix; mod lockfile; mod registry; mod resolve; +mod vendor; diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 9f9b035f804..20d7aa01ea8 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -1,208 +1,513 @@ -use std::collections::HashMap; -use std::env; +use std::collections::{BTreeMap, HashSet}; use std::fs::{self, File}; -use std::io::prelude::*; +use std::io::{self, BufRead}; use std::iter::repeat; -use std::path::{Path, PathBuf}; - -use curl::http; -use git2; -use registry::{Registry, NewCrate, NewCrateDependency}; -use term::color::BLACK; - -use core::source::Source; -use core::{Package, SourceId}; -use core::dependency::Kind; -use core::manifest::ManifestMetadata; -use ops; -use sources::{PathSource, RegistrySource}; -use util::config; -use util::{CargoResult, human, ChainError, ToUrl}; -use util::config::{Config, ConfigValue, Location}; -use util::important_paths::find_root_manifest_for_cwd; +use std::str; +use std::time::Duration; +use std::{cmp, env}; + +use crates_io::{NewCrate, NewCrateDependency, Registry}; +use curl::easy::{Easy, InfoType, SslOpt}; +use failure::{bail, format_err}; +use log::{log, Level}; +use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; + +use crate::core::dependency::Kind; +use crate::core::manifest::ManifestMetadata; +use crate::core::source::Source; +use crate::core::{Package, SourceId, Workspace}; +use crate::ops; +use crate::sources::{RegistrySource, SourceConfigMap, CRATES_IO_REGISTRY}; +use crate::util::config::{self, Config}; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::important_paths::find_root_manifest_for_wd; +use crate::util::IntoUrl; +use crate::util::{paths, validate_package_name}; +use crate::version; pub struct RegistryConfig { pub index: Option, pub token: Option, } -pub fn publish(manifest_path: &Path, - config: &Config, - token: Option, - index: Option, - verify: bool) -> CargoResult<()> { - let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), - config)); - try!(src.update()); - let pkg = try!(src.root_package()); +pub struct PublishOpts<'cfg> { + pub config: &'cfg Config, + pub token: Option, + pub index: Option, + pub verify: bool, + pub allow_dirty: bool, + pub jobs: Option, + pub target: Option, + pub dry_run: bool, + pub registry: Option, 
+    pub features: Vec<String>,
+    pub all_features: bool,
+    pub no_default_features: bool,
+}
+
+pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> {
+    let pkg = ws.current()?;
+
+    if let Some(ref allowed_registries) = *pkg.publish() {
+        let reg_name = opts
+            .registry
+            .clone()
+            .unwrap_or_else(|| CRATES_IO_REGISTRY.to_string());
+        if !allowed_registries.contains(&reg_name) {
+            bail!(
+                "`{}` cannot be published.\n\
+                 The registry `{}` is not listed in the `publish` value in Cargo.toml.",
+                pkg.name(),
+                reg_name
+            );
+        }
+    }
 
-    let (mut registry, reg_id) = try!(registry(config, token, index));
-    try!(verify_dependencies(&pkg, &reg_id));
+    let (mut registry, reg_id) = registry(
+        opts.config,
+        opts.token.clone(),
+        opts.index.clone(),
+        opts.registry.clone(),
+        true,
+        !opts.dry_run,
+    )?;
+    verify_dependencies(pkg, &registry, reg_id)?;
 
-    // Prepare a tarball, with a non-suppressible warning if metadata
+    // Prepare a tarball, with a non-suppressible warning if metadata
     // is missing since this is being put online.
-    let tarball = try!(ops::package(manifest_path, config, verify,
-                                    false, true)).unwrap();
+    let tarball = ops::package(
+        ws,
+        &ops::PackageOpts {
+            config: opts.config,
+            verify: opts.verify,
+            list: false,
+            check_metadata: true,
+            allow_dirty: opts.allow_dirty,
+            target: opts.target.clone(),
+            jobs: opts.jobs,
+            features: opts.features.clone(),
+            all_features: opts.all_features,
+            no_default_features: opts.no_default_features,
+        },
+    )?
+    .unwrap();
 
     // Upload said tarball to the specified destination
-    try!(config.shell().status("Uploading", pkg.package_id().to_string()));
-    try!(transmit(&pkg, &tarball, &mut registry));
+    opts.config
+        .shell()
+        .status("Uploading", pkg.package_id().to_string())?;
+    transmit(
+        opts.config,
+        pkg,
+        tarball.file(),
+        &mut registry,
+        reg_id,
+        opts.dry_run,
+    )?;
 
     Ok(())
 }
 
-fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
-                       -> CargoResult<()> {
+fn verify_dependencies(
+    pkg: &Package,
+    registry: &Registry,
+    registry_src: SourceId,
+) -> CargoResult<()> {
     for dep in pkg.dependencies().iter() {
         if dep.source_id().is_path() {
-            if dep.specified_req().is_none() {
-                return Err(human(format!("all path dependencies must have \
-                                          a version specified when \
-                                          publishing.\n\
-                                          dependency `{}` does not specify \
-                                          a version", dep.name())))
+            if !dep.specified_req() {
+                bail!(
+                    "all path dependencies must have a version specified \
+                     when publishing.\ndependency `{}` does not specify \
+                     a version",
+                    dep.package_name()
+                )
             }
         } else if dep.source_id() != registry_src {
-            return Err(human(format!("all dependencies must come from the \
-                                      same source.\ndependency `{}` comes \
-                                      from {} instead", dep.name(),
-                                      dep.source_id())))
+            if dep.source_id().is_registry() {
+                // Block requests to send to crates.io with alt-registry deps.
+                // This extra hostname check is mostly to assist with testing,
+                // but also prevents someone using `--index` to specify
+                // something that points to crates.io.
+ if registry_src.is_default_registry() || registry.host_is_crates_io() { + bail!("crates cannot be published to crates.io with dependencies sourced from other\n\ + registries either publish `{}` on crates.io or pull it into this repository\n\ + and specify it with a path and version\n\ + (crate `{}` is pulled from {})", + dep.package_name(), + dep.package_name(), + dep.source_id()); + } + } else { + bail!( + "crates cannot be published with dependencies sourced from \ + a repository\neither publish `{}` as its own crate and \ + specify a version as a dependency or pull it into this \ + repository and specify it with a path and version\n(crate `{}` has \ + repository path `{}`)", + dep.package_name(), + dep.package_name(), + dep.source_id() + ); + } } } Ok(()) } -fn transmit(pkg: &Package, tarball: &Path, registry: &mut Registry) - -> CargoResult<()> { - let deps = pkg.dependencies().iter().map(|dep| { - NewCrateDependency { - optional: dep.is_optional(), - default_features: dep.uses_default_features(), - name: dep.name().to_string(), - features: dep.features().to_vec(), - version_req: dep.version_req().to_string(), - target: dep.only_for_platform().map(|s| s.to_string()), - kind: match dep.kind() { - Kind::Normal => "normal", - Kind::Build => "build", - Kind::Development => "dev", - }.to_string(), - } - }).collect::>(); +fn transmit( + config: &Config, + pkg: &Package, + tarball: &File, + registry: &mut Registry, + registry_id: SourceId, + dry_run: bool, +) -> CargoResult<()> { + let deps = pkg + .dependencies() + .iter() + .map(|dep| { + // If the dependency is from a different registry, then include the + // registry in the dependency. + let dep_registry_id = match dep.registry_id() { + Some(id) => id, + None => SourceId::crates_io(config)?, + }; + // In the index and Web API, None means "from the same registry" + // whereas in Cargo.toml, it means "from crates.io". 
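+            // Illustrative example: when publishing to an alternate registry,
+            // a dependency fetched from crates.io is sent with
+            //
+            //     "registry": "https://github.com/rust-lang/crates.io-index"
+            //
+            // while a dependency from the registry being published to is sent
+            // with no `registry` value at all.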
+ let dep_registry = if dep_registry_id != registry_id { + Some(dep_registry_id.url().to_string()) + } else { + None + }; + + Ok(NewCrateDependency { + optional: dep.is_optional(), + default_features: dep.uses_default_features(), + name: dep.package_name().to_string(), + features: dep.features().iter().map(|s| s.to_string()).collect(), + version_req: dep.version_req().to_string(), + target: dep.platform().map(|s| s.to_string()), + kind: match dep.kind() { + Kind::Normal => "normal", + Kind::Build => "build", + Kind::Development => "dev", + } + .to_string(), + registry: dep_registry, + explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()), + }) + }) + .collect::>>()?; let manifest = pkg.manifest(); let ManifestMetadata { - ref authors, ref description, ref homepage, ref documentation, - ref keywords, ref readme, ref repository, ref license, ref license_file, + ref authors, + ref description, + ref homepage, + ref documentation, + ref keywords, + ref readme, + ref repository, + ref license, + ref license_file, + ref categories, + ref badges, + ref links, } = *manifest.metadata(); - let readme = match *readme { - Some(ref readme) => { - let path = pkg.root().join(readme); - let mut contents = String::new(); - try!(File::open(&path).and_then(|mut f| { - f.read_to_string(&mut contents) - }).chain_error(|| { - human("failed to read the specified README") - })); - Some(contents) - } + let readme_content = match *readme { + Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?), None => None, }; - match *license_file { - Some(ref file) => { - if fs::metadata(&pkg.root().join(file)).is_err() { - return Err(human(format!("the license file `{}` does not exist", - file))) + if let Some(ref file) = *license_file { + if fs::metadata(&pkg.root().join(file)).is_err() { + bail!("the license file `{}` does not exist", file) + } + } + + // Do not upload if performing a dry run + if dry_run { + config.shell().warn("aborting upload due to dry run")?; + return Ok(()); + } + + let summary = pkg.summary(); + let string_features = summary + .features() + .iter() + .map(|(feat, values)| { + ( + feat.to_string(), + values.1.iter().map(|fv| fv.to_string(&summary)).collect(), + ) + }) + .collect::>>(); + + let publish = registry.publish( + &NewCrate { + name: pkg.name().to_string(), + vers: pkg.version().to_string(), + deps, + features: string_features, + authors: authors.clone(), + description: description.clone(), + homepage: homepage.clone(), + documentation: documentation.clone(), + keywords: keywords.clone(), + categories: categories.clone(), + readme: readme_content, + readme_file: readme.clone(), + repository: repository.clone(), + license: license.clone(), + license_file: license_file.clone(), + badges: badges.clone(), + links: links.clone(), + }, + tarball, + ); + + match publish { + Ok(warnings) => { + if !warnings.invalid_categories.is_empty() { + let msg = format!( + "the following are not valid category slugs and were \ + ignored: {}. Please see https://crates.io/category_slugs \ + for the list of all category slugs. \ + ", + warnings.invalid_categories.join(", ") + ); + config.shell().warn(&msg)?; + } + + if !warnings.invalid_badges.is_empty() { + let msg = format!( + "the following are not valid badges and were ignored: {}. \ + Either the badge type specified is unknown or a required \ + attribute is missing. 
Please see \ + https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ + for valid badge types and their required attributes.", + warnings.invalid_badges.join(", ") + ); + config.shell().warn(&msg)?; } + + if !warnings.other.is_empty() { + for msg in warnings.other { + config.shell().warn(&msg)?; + } + } + + Ok(()) } - None => {} + Err(e) => Err(e), } - registry.publish(&NewCrate { - name: pkg.name().to_string(), - vers: pkg.version().to_string(), - deps: deps, - features: pkg.summary().features().clone(), - authors: authors.clone(), - description: description.clone(), - homepage: homepage.clone(), - documentation: documentation.clone(), - keywords: keywords.clone(), - readme: readme, - repository: repository.clone(), - license: license.clone(), - license_file: license_file.clone(), - }, tarball).map_err(|e| { - human(e.to_string()) - }) } -pub fn registry_configuration(config: &Config) -> CargoResult { - let index = try!(config.get_string("registry.index")).map(|p| p.0); - let token = try!(config.get_string("registry.token")).map(|p| p.0); - Ok(RegistryConfig { index: index, token: token }) +pub fn registry_configuration( + config: &Config, + registry: Option, +) -> CargoResult { + let (index, token) = match registry { + Some(registry) => { + validate_package_name(®istry, "registry name", "")?; + ( + Some(config.get_registry_index(®istry)?.to_string()), + config + .get_string(&format!("registries.{}.token", registry))? + .map(|p| p.val), + ) + } + None => { + // Checking for default index and token + ( + config + .get_default_registry_index()? + .map(|url| url.to_string()), + config.get_string("registry.token")?.map(|p| p.val), + ) + } + }; + + Ok(RegistryConfig { index, token }) } -pub fn registry(config: &Config, - token: Option, - index: Option) -> CargoResult<(Registry, SourceId)> { +fn registry( + config: &Config, + token: Option, + index: Option, + registry: Option, + force_update: bool, + validate_token: bool, +) -> CargoResult<(Registry, SourceId)> { // Parse all configuration options let RegistryConfig { token: token_config, index: index_config, - } = try!(registry_configuration(config)); + } = registry_configuration(config, registry.clone())?; let token = token.or(token_config); - let index = index.or(index_config).unwrap_or(RegistrySource::default_url()); - let index = try!(index.to_url().map_err(human)); - let sid = SourceId::for_registry(&index); + let sid = get_source_id(config, index_config.or(index), registry)?; let api_host = { - let mut src = RegistrySource::new(&sid, config); - try!(src.update().chain_error(|| { - human(format!("Failed to update registry {}", index)) - })); - (try!(src.config())).api + let _lock = config.acquire_package_cache_lock()?; + let mut src = RegistrySource::remote(sid, &HashSet::new(), config); + // Only update the index if the config is not available or `force` is set. + let cfg = src.config(); + let cfg = if force_update || cfg.is_err() { + src.update() + .chain_err(|| format!("failed to update {}", sid))?; + cfg.or_else(|_| src.config())? + } else { + cfg.unwrap() + }; + cfg.and_then(|cfg| cfg.api) + .ok_or_else(|| format_err!("{} does not support API commands", sid))? + }; + let handle = http_handle(config)?; + if validate_token && token.is_none() { + bail!("no upload token found, please run `cargo login`"); }; - let handle = try!(http_handle(config)); Ok((Registry::new_handle(api_host, token, handle), sid)) } -/// Create a new HTTP handle with appropriate global configuration for cargo. 
-pub fn http_handle(config: &Config) -> CargoResult { +/// Creates a new HTTP handle with appropriate global configuration for cargo. +pub fn http_handle(config: &Config) -> CargoResult { + let (mut handle, timeout) = http_handle_and_timeout(config)?; + timeout.configure(&mut handle)?; + Ok(handle) +} + +pub fn http_handle_and_timeout(config: &Config) -> CargoResult<(Easy, HttpTimeout)> { + if config.frozen() { + bail!( + "attempting to make an HTTP request, but --frozen was \ + specified" + ) + } + if !config.network_allowed() { + bail!("can't make HTTP request in the offline mode") + } + // The timeout option for libcurl by default times out the entire transfer, // but we probably don't want this. Instead we only set timeouts for the // connect phase as well as a "low speed" timeout so if we don't receive // many bytes in a large-ish period of time then we time out. - let handle = http::handle().timeout(0) - .connect_timeout(30_000 /* milliseconds */) - .low_speed_limit(10 /* bytes per second */) - .low_speed_timeout(30 /* seconds */); - let handle = match try!(http_proxy(config)) { - Some(proxy) => handle.proxy(proxy), - None => handle, - }; - let handle = match try!(http_timeout(config)) { - Some(timeout) => handle.connect_timeout(timeout as usize) - .low_speed_timeout((timeout as usize) / 1000), - None => handle, - }; - Ok(handle) + let mut handle = Easy::new(); + let timeout = configure_http_handle(config, &mut handle)?; + Ok((handle, timeout)) +} + +pub fn needs_custom_http_transport(config: &Config) -> CargoResult { + let proxy_exists = http_proxy_exists(config)?; + let timeout = HttpTimeout::new(config)?.is_non_default(); + let cainfo = config.get_path("http.cainfo")?; + let check_revoke = config.get_bool("http.check-revoke")?; + let user_agent = config.get_string("http.user-agent")?; + + Ok(proxy_exists + || timeout + || cainfo.is_some() + || check_revoke.is_some() + || user_agent.is_some()) +} + +/// Configure a libcurl http handle with the defaults options for Cargo +pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult { + if let Some(proxy) = http_proxy(config)? { + handle.proxy(&proxy)?; + } + if let Some(cainfo) = config.get_path("http.cainfo")? { + handle.cainfo(&cainfo.val)?; + } + if let Some(check) = config.get_bool("http.check-revoke")? { + handle.ssl_options(SslOpt::new().no_revoke(!check.val))?; + } + if let Some(user_agent) = config.get_string("http.user-agent")? { + handle.useragent(&user_agent.val)?; + } else { + handle.useragent(&version().to_string())?; + } + + if let Some(true) = config.get::>("http.debug")? { + handle.verbose(true)?; + handle.debug_function(|kind, data| { + let (prefix, level) = match kind { + InfoType::Text => ("*", Level::Debug), + InfoType::HeaderIn => ("<", Level::Debug), + InfoType::HeaderOut => (">", Level::Debug), + InfoType::DataIn => ("{", Level::Trace), + InfoType::DataOut => ("}", Level::Trace), + InfoType::SslDataIn | InfoType::SslDataOut => return, + _ => return, + }; + match str::from_utf8(data) { + Ok(s) => { + for line in s.lines() { + log!(level, "http-debug: {} {}", prefix, line); + } + } + Err(_) => { + log!( + level, + "http-debug: {} ({} bytes of data)", + prefix, + data.len() + ); + } + } + })?; + } + + HttpTimeout::new(config) +} + +#[must_use] +pub struct HttpTimeout { + pub dur: Duration, + pub low_speed_limit: u32, } -/// Find an explicit HTTP proxy if one is available. 
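The comment above explains why libcurl's whole-transfer `timeout` is avoided: Cargo only bounds the connect phase and adds a low-speed watchdog. A minimal sketch of those three `curl` crate calls with the default values used here (30 seconds, 10 bytes per second); this is a standalone illustration of the strategy, not the configure path itself:

```rust
use curl::easy::Easy;
use std::time::Duration;

// Sketch of the timeout strategy described above. The 30s/10B defaults
// mirror the fallbacks read from `http.timeout` and
// `http.low-speed-limit` in this file.
fn apply_default_timeouts(handle: &mut Easy) -> Result<(), curl::Error> {
    handle.connect_timeout(Duration::from_secs(30))?; // connect phase only
    handle.low_speed_time(Duration::from_secs(30))?; // measurement window
    handle.low_speed_limit(10)?; // abort below 10 bytes/sec for that window
    Ok(())
}
```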
+impl HttpTimeout { + pub fn new(config: &Config) -> CargoResult { + let low_speed_limit = config + .get::>("http.low-speed-limit")? + .unwrap_or(10); + let seconds = config + .get::>("http.timeout")? + .or_else(|| env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) + .unwrap_or(30); + Ok(HttpTimeout { + dur: Duration::new(seconds, 0), + low_speed_limit, + }) + } + + fn is_non_default(&self) -> bool { + self.dur != Duration::new(30, 0) || self.low_speed_limit != 10 + } + + pub fn configure(&self, handle: &mut Easy) -> CargoResult<()> { + // The timeout option for libcurl by default times out the entire + // transfer, but we probably don't want this. Instead we only set + // timeouts for the connect phase as well as a "low speed" timeout so + // if we don't receive many bytes in a large-ish period of time then we + // time out. + handle.connect_timeout(self.dur)?; + handle.low_speed_time(self.dur)?; + handle.low_speed_limit(self.low_speed_limit)?; + Ok(()) + } +} + +/// Finds an explicit HTTP proxy if one is available. /// /// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified /// via environment variables are picked up by libcurl. fn http_proxy(config: &Config) -> CargoResult> { - match try!(config.get_string("http.proxy")) { - Some((s, _)) => return Ok(Some(s)), - None => {} + if let Some(s) = config.get_string("http.proxy")? { + return Ok(Some(s.val)); } - match git2::Config::open_default() { - Ok(cfg) => { - match cfg.get_str("http.proxy") { - Ok(s) => return Ok(Some(s.to_string())), - Err(..) => {} - } + if let Ok(cfg) = git2::Config::open_default() { + if let Ok(s) = cfg.get_str("http.proxy") { + return Ok(Some(s.to_string())); } - Err(..) => {} } Ok(None) } @@ -213,41 +518,65 @@ fn http_proxy(config: &Config) -> CargoResult> { /// /// * cargo's `http.proxy` /// * git's `http.proxy` -/// * http_proxy env var -/// * HTTP_PROXY env var -/// * https_proxy env var -/// * HTTPS_PROXY env var -pub fn http_proxy_exists(config: &Config) -> CargoResult { - if try!(http_proxy(config)).is_some() { +/// * `http_proxy` env var +/// * `HTTP_PROXY` env var +/// * `https_proxy` env var +/// * `HTTPS_PROXY` env var +fn http_proxy_exists(config: &Config) -> CargoResult { + if http_proxy(config)?.is_some() { Ok(true) } else { - Ok(["http_proxy", "HTTP_PROXY", - "https_proxy", "HTTPS_PROXY"].iter().any(|v| env::var(v).is_ok())) + Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"] + .iter() + .any(|v| env::var(v).is_ok())) } } -pub fn http_timeout(config: &Config) -> CargoResult> { - match try!(config.get_i64("http.timeout")) { - Some((s, _)) => return Ok(Some(s)), - None => {} - } - Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) -} +pub fn registry_login( + config: &Config, + token: Option, + reg: Option, +) -> CargoResult<()> { + let (registry, _) = registry(config, token.clone(), None, reg.clone(), false, false)?; + + let token = match token { + Some(token) => token, + None => { + println!( + "please visit {}/me and paste the API Token below", + registry.host() + ); + let mut line = String::new(); + let input = io::stdin(); + input + .lock() + .read_line(&mut line) + .chain_err(|| "failed to read stdin") + .map_err(failure::Error::from)?; + line.trim().to_string() + } + }; + + let RegistryConfig { + token: old_token, .. 
+ } = registry_configuration(config, reg.clone())?; -pub fn registry_login(config: &Config, token: String) -> CargoResult<()> { - let RegistryConfig { index, token: _ } = try!(registry_configuration(config)); - let mut map = HashMap::new(); - let p = config.cwd().to_path_buf(); - match index { - Some(index) => { - map.insert("index".to_string(), ConfigValue::String(index, p.clone())); + if let Some(old_token) = old_token { + if old_token == token { + config.shell().status("Login", "already logged in")?; + return Ok(()); } - None => {} } - map.insert("token".to_string(), ConfigValue::String(token, p)); - config::set_config(config, Location::Global, "registry", - ConfigValue::Table(map, PathBuf::from("."))) + config::save_credentials(config, token, reg.clone())?; + config.shell().status( + "Login", + format!( + "token for `{}` saved", + reg.as_ref().map_or("crates.io", String::as_str) + ), + )?; + Ok(()) } pub struct OwnersOptions { @@ -257,59 +586,57 @@ pub struct OwnersOptions { pub to_add: Option>, pub to_remove: Option>, pub list: bool, + pub registry: Option, } pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { let name = match opts.krate { Some(ref name) => name.clone(), None => { - let manifest_path = try!(find_root_manifest_for_cwd(None)); - let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), - config)); - try!(src.update()); - let pkg = try!(src.root_package()); - pkg.name().to_string() + let manifest_path = find_root_manifest_for_wd(config.cwd())?; + let ws = Workspace::new(&manifest_path, config)?; + ws.current()?.package_id().name().to_string() } }; - let (mut registry, _) = try!(registry(config, opts.token.clone(), - opts.index.clone())); - - match opts.to_add { - Some(ref v) => { - let v = v.iter().map(|s| &s[..]).collect::>(); - try!(config.shell().status("Owner", format!("adding {:?} to crate {}", - v, name))); - try!(registry.add_owners(&name, &v).map_err(|e| { - human(format!("failed to add owners to crate {}: {}", name, e)) - })); - } - None => {} + let (mut registry, _) = registry( + config, + opts.token.clone(), + opts.index.clone(), + opts.registry.clone(), + true, + true, + )?; + + if let Some(ref v) = opts.to_add { + let v = v.iter().map(|s| &s[..]).collect::>(); + let msg = registry + .add_owners(&name, &v) + .map_err(|e| format_err!("failed to invite owners to crate {}: {}", name, e))?; + + config.shell().status("Owner", msg)?; } - match opts.to_remove { - Some(ref v) => { - let v = v.iter().map(|s| &s[..]).collect::>(); - try!(config.shell().status("Owner", format!("removing {:?} from crate {}", - v, name))); - try!(registry.remove_owners(&name, &v).map_err(|e| { - human(format!("failed to remove owners from crate {}: {}", name, e)) - })); - } - None => {} + if let Some(ref v) = opts.to_remove { + let v = v.iter().map(|s| &s[..]).collect::>(); + config + .shell() + .status("Owner", format!("removing {:?} from crate {}", v, name))?; + registry + .remove_owners(&name, &v) + .chain_err(|| format!("failed to remove owners from crate {}", name))?; } if opts.list { - let owners = try!(registry.list_owners(&name).map_err(|e| { - human(format!("failed to list owners of crate {}: {}", name, e)) - })); + let owners = registry + .list_owners(&name) + .chain_err(|| format!("failed to list owners of crate {}", name))?; for owner in owners.iter() { print!("{}", owner.login); match (owner.name.as_ref(), owner.email.as_ref()) { (Some(name), Some(email)) => println!(" ({} <{}>)", name, email), - (Some(s), None) | - (None, Some(s)) 
=> println!(" ({})", s), - (None, None) => println!(""), + (Some(s), None) | (None, Some(s)) => println!(" ({})", s), + (None, None) => println!(), } } } @@ -317,81 +644,134 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { Ok(()) } -pub fn yank(config: &Config, - krate: Option, - version: Option, - token: Option, - index: Option, - undo: bool) -> CargoResult<()> { +pub fn yank( + config: &Config, + krate: Option, + version: Option, + token: Option, + index: Option, + undo: bool, + reg: Option, +) -> CargoResult<()> { let name = match krate { Some(name) => name, None => { - let manifest_path = try!(find_root_manifest_for_cwd(None)); - let mut src = try!(PathSource::for_path(manifest_path.parent().unwrap(), - config)); - try!(src.update()); - let pkg = try!(src.root_package()); - pkg.name().to_string() + let manifest_path = find_root_manifest_for_wd(config.cwd())?; + let ws = Workspace::new(&manifest_path, config)?; + ws.current()?.package_id().name().to_string() } }; let version = match version { Some(v) => v, - None => return Err(human("a version must be specified to yank")) + None => bail!("a version must be specified to yank"), }; - let (mut registry, _) = try!(registry(config, token, index)); + let (mut registry, _) = registry(config, token, index, reg, true, true)?; if undo { - try!(config.shell().status("Unyank", format!("{}:{}", name, version))); - try!(registry.unyank(&name, &version).map_err(|e| { - human(format!("failed to undo a yank: {}", e)) - })); + config + .shell() + .status("Unyank", format!("{}:{}", name, version))?; + registry + .unyank(&name, &version) + .chain_err(|| "failed to undo a yank")?; } else { - try!(config.shell().status("Yank", format!("{}:{}", name, version))); - try!(registry.yank(&name, &version).map_err(|e| { - human(format!("failed to yank: {}", e)) - })); + config + .shell() + .status("Yank", format!("{}:{}", name, version))?; + registry + .yank(&name, &version) + .chain_err(|| "failed to yank")?; } Ok(()) } -pub fn search(query: &str, config: &Config, index: Option) -> CargoResult<()> { - fn truncate_with_ellipsis(s: &str, max_length: usize) -> String { - if s.len() < max_length { - s.to_string() - } else { - format!("{}…", &s[..max_length - 1]) +fn get_source_id( + config: &Config, + index: Option, + reg: Option, +) -> CargoResult { + match (reg, index) { + (Some(r), _) => SourceId::alt_registry(config, &r), + (_, Some(i)) => SourceId::for_registry(&i.into_url()?), + _ => { + let map = SourceConfigMap::new(config)?; + let src = map.load(SourceId::crates_io(config)?, &HashSet::new())?; + Ok(src.replaced_source_id()) } } +} - let (mut registry, _) = try!(registry(config, None, index)); - let crates = try!(registry.search(query).map_err(|e| { - human(format!("failed to retrieve search results from the registry: {}", e)) - })); - - let list_items = crates.iter() - .map(|krate| ( - format!("{} ({})", krate.name, krate.max_version), - krate.description.as_ref().map(|desc| - truncate_with_ellipsis(&desc.replace("\n", " "), 128)) - )) - .collect::>(); - let description_margin = list_items.iter() - .map(|&(ref left, _)| left.len() + 4) - .max() - .unwrap_or(0); - - for (name, description) in list_items.into_iter() { +pub fn search( + query: &str, + config: &Config, + index: Option, + limit: u32, + reg: Option, +) -> CargoResult<()> { + fn truncate_with_ellipsis(s: &str, max_width: usize) -> String { + // We should truncate at grapheme-boundary and compute character-widths, + // yet the dependencies on unicode-segmentation and 
unicode-width are + // not worth it. + let mut chars = s.chars(); + let mut prefix = (&mut chars).take(max_width - 1).collect::(); + if chars.next().is_some() { + prefix.push('…'); + } + prefix + } + + let (mut registry, source_id) = registry(config, None, index, reg, false, false)?; + let (crates, total_crates) = registry + .search(query, limit) + .chain_err(|| "failed to retrieve search results from the registry")?; + + let names = crates + .iter() + .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version)) + .collect::>(); + + let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default(); + + let description_length = cmp::max(80, 128 - description_margin); + + let descriptions = crates.iter().map(|krate| { + krate + .description + .as_ref() + .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length)) + }); + + for (name, description) in names.into_iter().zip(descriptions) { let line = match description { Some(desc) => { - let space = repeat(' ').take(description_margin - name.len()) - .collect::(); - name.to_string() + &space + &desc + let space = repeat(' ') + .take(description_margin - name.len()) + .collect::(); + name + &space + "# " + &desc } - None => name + None => name, + }; + println!("{}", line); + } + + let search_max_limit = 100; + if total_crates > limit && limit < search_max_limit { + println!( + "... and {} crates more (use --limit N to see more)", + total_crates - limit + ); + } else if total_crates > limit && limit >= search_max_limit { + let extra = if source_id.is_default_registry() { + format!( + " (go to https://crates.io/search?q={} to see more)", + percent_encode(query.as_bytes(), NON_ALPHANUMERIC) + ) + } else { + String::new() }; - try!(config.shell().say(line, BLACK)); + println!("... and {} crates more{}", total_crates - limit, extra); } Ok(()) diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 7f17321bd7d..98e1a70878d 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -1,122 +1,599 @@ -use std::collections::{HashMap, HashSet}; +//! High-level APIs for executing the resolver. +//! +//! This module provides functions for running the resolver given a workspace. +//! There are roughly 3 main functions: +//! +//! - `resolve_ws`: A simple, high-level function with no options. +//! - `resolve_ws_with_opts`: A medium-level function with options like +//! user-provided features. This is the most appropriate function to use in +//! most cases. +//! - `resolve_with_previous`: A low-level function for running the resolver, +//! providing the most power and flexibility. -use core::{Package, PackageId, SourceId}; -use core::registry::PackageRegistry; -use core::resolver::{self, Resolve, Method}; -use ops; -use util::CargoResult; +use std::collections::HashSet; +use std::rc::Rc; -/// Resolve all dependencies for the specified `package` using the previous -/// lockfile as a guide if present. +use log::{debug, trace}; + +use crate::core::registry::PackageRegistry; +use crate::core::resolver::{self, Resolve, ResolveOpts}; +use crate::core::Feature; +use crate::core::{PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace}; +use crate::ops; +use crate::sources::PathSource; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::profile; + +const UNUSED_PATCH_WARNING: &str = "\ +Check that the patched package version and available features are compatible +with the dependency requirements. 
If the patch has a different version from +what is locked in the Cargo.lock file, run `cargo update` to use the new +version. This may also occur with an optional dependency that is not enabled."; + +/// Resolves all dependencies for the workspace using the previous +/// lock file as a guide if present. +/// +/// This function will also write the result of resolution as a new lock file +/// (unless it is an ephemeral workspace such as `cargo install` or `cargo +/// package`). +/// +/// This is a simple interface used by commands like `clean`, `fetch`, and +/// `package`, which don't specify any options or features. +pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = resolve_with_registry(ws, &mut registry)?; + let packages = get_resolved_packages(&resolve, registry)?; + Ok((packages, resolve)) +} + +/// Resolves dependencies for some packages of the workspace, +/// taking into account `paths` overrides and activated features. +/// +/// This function will also write the result of resolution as a new lock file +/// (unless `Workspace::require_optional_deps` is false, such as `cargo +/// install` or `-Z avoid-dev-deps`), or it is an ephemeral workspace (`cargo +/// install` or `cargo package`). /// -/// This function will also generate a write the result of resolution as a new -/// lockfile. -pub fn resolve_pkg(registry: &mut PackageRegistry, package: &Package) - -> CargoResult { - let prev = try!(ops::load_pkg_lockfile(package)); - let resolve = try!(resolve_with_previous(registry, package, - Method::Everything, - prev.as_ref(), None)); - try!(ops::write_pkg_lockfile(package, &resolve)); +/// `specs` may be empty, which indicates it should resolve all workspace +/// members. In this case, `opts.all_features` must be `true`. +pub fn resolve_ws_with_opts<'a>( + ws: &Workspace<'a>, + opts: ResolveOpts, + specs: &[PackageIdSpec], +) -> CargoResult<(PackageSet<'a>, Resolve)> { + let mut registry = PackageRegistry::new(ws.config())?; + let mut add_patches = true; + + let resolve = if ws.ignore_lock() { + None + } else if ws.require_optional_deps() { + // First, resolve the root_package's *listed* dependencies, as well as + // downloading and updating all remotes and such. + let resolve = resolve_with_registry(ws, &mut registry)?; + // No need to add patches again, `resolve_with_registry` has done it. + add_patches = false; + + // Second, resolve with precisely what we're doing. Filter out + // transitive dependencies if necessary, specify features, handle + // overrides, etc. + let _p = profile::start("resolving with overrides..."); + + add_overrides(&mut registry, ws)?; + + for &(ref replace_spec, ref dep) in ws.root_replace() { + if !resolve + .iter() + .any(|r| replace_spec.matches(r) && !dep.matches_id(r)) + { + ws.config() + .shell() + .warn(format!("package replacement is not used: {}", replace_spec))? + } + } + + Some(resolve) + } else { + ops::load_pkg_lockfile(ws)? 
+ }; + + let resolved_with_overrides = resolve_with_previous( + &mut registry, + ws, + opts, + resolve.as_ref(), + None, + specs, + add_patches, + )?; + + let packages = get_resolved_packages(&resolved_with_overrides, registry)?; + + Ok((packages, resolved_with_overrides)) +} + +fn resolve_with_registry<'cfg>( + ws: &Workspace<'cfg>, + registry: &mut PackageRegistry<'cfg>, +) -> CargoResult { + let prev = ops::load_pkg_lockfile(ws)?; + let resolve = resolve_with_previous( + registry, + ws, + ResolveOpts::everything(), + prev.as_ref(), + None, + &[], + true, + )?; + + if !ws.is_ephemeral() { + ops::write_pkg_lockfile(ws, &resolve)?; + } Ok(resolve) } -/// Resolve all dependencies for a package using an optional previous instance +/// Resolves all dependencies for a package using an optional previous instance. /// of resolve to guide the resolution process. /// /// This also takes an optional hash set, `to_avoid`, which is a list of package -/// ids that should be avoided when consulting the previous instance of resolve +/// IDs that should be avoided when consulting the previous instance of resolve /// (often used in pairings with updates). /// -/// The previous resolve normally comes from a lockfile. This function does not -/// read or write lockfiles from the filesystem. -pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, - package: &Package, - method: Method, - previous: Option<&'a Resolve>, - to_avoid: Option<&HashSet<&'a PackageId>>) - -> CargoResult { +/// The previous resolve normally comes from a lock file. This function does not +/// read or write lock files from the filesystem. +/// +/// `specs` may be empty, which indicates it should resolve all workspace +/// members. In this case, `opts.all_features` must be `true`. +/// +/// If `register_patches` is true, then entries from the `[patch]` table in +/// the manifest will be added to the given `PackageRegistry`. +pub fn resolve_with_previous<'cfg>( + registry: &mut PackageRegistry<'cfg>, + ws: &Workspace<'cfg>, + opts: ResolveOpts, + previous: Option<&Resolve>, + to_avoid: Option<&HashSet>, + specs: &[PackageIdSpec], + register_patches: bool, +) -> CargoResult { + assert!( + !specs.is_empty() || opts.all_features, + "no specs requires all_features" + ); + + // We only want one Cargo at a time resolving a crate graph since this can + // involve a lot of frobbing of the global caches. + let _lock = ws.config().acquire_package_cache_lock()?; // Here we place an artificial limitation that all non-registry sources - // cannot be locked at more than one revision. This means that if a git + // cannot be locked at more than one revision. This means that if a Git // repository provides more than one package, they must all be updated in // step when any of them are updated. // - // TODO: This seems like a hokey reason to single out the registry as being - // different - let mut to_avoid_sources = HashSet::new(); - match to_avoid { - Some(set) => { - for package_id in set.iter() { - let source = package_id.source_id(); - if !source.is_registry() { - to_avoid_sources.insert(source); - } + // TODO: this seems like a hokey reason to single out the registry as being + // different. 
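For orientation, a hedged usage sketch of the simplest entry point above, `resolve_ws`: it resolves the workspace, writes the lock file for non-ephemeral workspaces, and hands back the package set plus the resolved graph. This assumes a `Workspace` has already been constructed from a manifest path; it is not code from this change:

```rust
use cargo::core::Workspace;
use cargo::ops;
use cargo::util::CargoResult;

// Hedged usage sketch: resolve a workspace with the simple entry point
// and walk the resulting lock graph. Assumes `ws` was built beforehand,
// e.g. via `Workspace::new(&manifest_path, config)?`.
fn print_resolved(ws: &Workspace<'_>) -> CargoResult<()> {
    let (_packages, resolve) = ops::resolve_ws(ws)?;
    for pkg_id in resolve.iter() {
        println!("{}", pkg_id); // one line per resolved PackageId
    }
    Ok(())
}
```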
+ let mut to_avoid_sources: HashSet = HashSet::new(); + if let Some(to_avoid) = to_avoid { + to_avoid_sources.extend( + to_avoid + .iter() + .map(|p| p.source_id()) + .filter(|s| !s.is_registry()), + ); + } + + let keep = |p: &PackageId| { + !to_avoid_sources.contains(&p.source_id()) + && match to_avoid { + Some(set) => !set.contains(p), + None => true, } + }; + + // In the case where a previous instance of resolve is available, we + // want to lock as many packages as possible to the previous version + // without disturbing the graph structure. + let mut try_to_use = HashSet::new(); + if let Some(r) = previous { + trace!("previous: {:?}", r); + register_previous_locks(ws, registry, r, &keep); + + // Everything in the previous lock file we want to keep is prioritized + // in dependency selection if it comes up, aka we want to have + // conservative updates. + try_to_use.extend(r.iter().filter(keep).inspect(|id| { + debug!("attempting to prefer {}", id); + })); + } + + if register_patches { + for (url, patches) in ws.root_patch() { + let previous = match previous { + Some(r) => r, + None => { + registry.patch(url, patches)?; + continue; + } + }; + let patches = patches + .iter() + .map(|dep| { + let unused = previous.unused_patches().iter().cloned(); + let candidates = previous.iter().chain(unused); + match candidates.filter(keep).find(|&id| dep.matches_id(id)) { + Some(id) => { + let mut dep = dep.clone(); + dep.lock_to(id); + dep + } + None => dep.clone(), + } + }) + .collect::>(); + registry.patch(url, &patches)?; } - None => {} + + registry.lock_patches(); } - let summary = package.summary().clone(); - let summary = match previous { - Some(r) => { - // In the case where a previous instance of resolve is available, we - // want to lock as many packages as possible to the previous version - // without disturbing the graph structure. To this end we perform - // two actions here: - // - // 1. We inform the package registry of all locked packages. This - // involves informing it of both the locked package's id as well - // as the versions of all locked dependencies. The registry will - // then takes this information into account when it is queried. - // - // 2. The specified package's summary will have its dependencies - // modified to their precise variants. This will instruct the - // first step of the resolution process to not query for ranges - // but rather precise dependency versions. - // - // This process must handle altered dependencies, however, as - // it's possible for a manifest to change over time to have - // dependencies added, removed, or modified to different version - // ranges. To deal with this, we only actually lock a dependency - // to the previously resolved version if the dependency listed - // still matches the locked version. 
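To make the filtering rule explicit, here is the `keep` closure above restated as a free function over explicit parameters rather than captured locals (an illustrative sketch, not code from this change): a package from the previous lock file survives unless it was named for update or its entire non-registry source was marked to be avoided.

```rust
use std::collections::HashSet;

use cargo::core::{PackageId, SourceId};

// Restatement of the `keep` closure built above: keep a previously
// locked package only if neither it nor its whole (non-registry)
// source was requested for update.
fn keep(
    id: &PackageId,
    to_avoid: Option<&HashSet<PackageId>>,
    to_avoid_sources: &HashSet<SourceId>,
) -> bool {
    !to_avoid_sources.contains(&id.source_id())
        && to_avoid.map_or(true, |set| !set.contains(id))
}
```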
- for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) { - let deps = r.deps(node).into_iter().flat_map(|i| i) - .filter(|p| keep(p, to_avoid, &to_avoid_sources)) - .map(|p| p.clone()).collect(); - registry.register_lock(node.clone(), deps); + for member in ws.members() { + registry.add_sources(Some(member.package_id().source_id()))?; + } + + let mut summaries = Vec::new(); + if ws.config().cli_unstable().package_features { + let mut members = Vec::new(); + if specs.is_empty() { + members.extend(ws.members()); + } else { + if specs.len() > 1 && !opts.features.is_empty() { + failure::bail!("cannot specify features for more than one package"); + } + members.extend( + ws.members() + .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))), + ); + // Edge case: running `cargo build -p foo`, where `foo` is not a member + // of current workspace. Add all packages from workspace to get `foo` + // into the resolution graph. + if members.is_empty() { + if !(opts.features.is_empty() && !opts.all_features && opts.uses_default_features) { + failure::bail!("cannot specify features for packages outside of workspace"); + } + members.extend(ws.members()); + panic!("tested?"); } + } + for member in members { + let summary = registry.lock(member.summary().clone()); + summaries.push((summary, opts.clone())) + } + } else { + for member in ws.members() { + let summary_resolve_opts = if specs.is_empty() { + // When resolving the entire workspace, resolve each member + // with all features enabled. + opts.clone() + } else { + // If we're not resolving everything though then we're constructing the + // exact crate graph we're going to build. Here we don't necessarily + // want to keep around all workspace crates as they may not all be + // built/tested. + // + // Additionally, the `opts` specified represents command line + // flags, which really only matters for the current package + // (determined by the cwd). If other packages are specified (via + // `-p`) then the command line flags like features don't apply to + // them. + // + // As a result, if this `member` is the current member of the + // workspace, then we use `opts` specified. Otherwise we use a + // base `opts` with no features specified but using default features + // for any other packages specified with `-p`. + let member_id = member.package_id(); + match ws.current_opt() { + Some(current) if member_id == current.package_id() => opts.clone(), + _ => { + if specs.iter().any(|spec| spec.matches(member_id)) { + // -p for a workspace member that is not the + // "current" one, don't use the local `--features`. + ResolveOpts { + dev_deps: opts.dev_deps, + features: Rc::default(), + all_features: opts.all_features, + uses_default_features: true, + } + } else { + // `-p` for non-member, skip. 
+ continue; + } + } + } + }; + + let summary = registry.lock(member.summary().clone()); + summaries.push((summary, summary_resolve_opts)); + } + }; + + let root_replace = ws.root_replace(); - let map = r.deps(r.root()).into_iter().flat_map(|i| i).filter(|p| { - keep(p, to_avoid, &to_avoid_sources) - }).map(|d| { - (d.name(), d) - }).collect::>(); - summary.map_dependencies(|d| { - match map.get(d.name()) { - Some(&lock) if d.matches_id(lock) => d.lock_to(lock), - _ => d, + let replace = match previous { + Some(r) => root_replace + .iter() + .map(|&(ref spec, ref dep)| { + for (&key, &val) in r.replacements().iter() { + if spec.matches(key) && dep.matches_id(val) && keep(&val) { + let mut dep = dep.clone(); + dep.lock_to(val); + return (spec.clone(), dep); + } } + (spec.clone(), dep.clone()) }) + .collect::>(), + None => root_replace.to_vec(), + }; + + ws.preload(registry); + let mut resolved = resolver::resolve( + &summaries, + &replace, + registry, + &try_to_use, + Some(ws.config()), + ws.features().require(Feature::public_dependency()).is_ok(), + )?; + resolved.register_used_patches(registry.patches()); + if register_patches { + // It would be good if this warning was more targeted and helpful + // (such as showing close candidates that failed to match). However, + // that's not terribly easy to do, so just show a general help + // message. + let warnings: Vec = resolved + .unused_patches() + .iter() + .map(|pkgid| format!("Patch `{}` was not used in the crate graph.", pkgid)) + .collect(); + if !warnings.is_empty() { + ws.config().shell().warn(format!( + "{}\n{}", + warnings.join("\n"), + UNUSED_PATCH_WARNING + ))?; + } + } + if let Some(previous) = previous { + resolved.merge_from(previous)?; + } + Ok(resolved) +} + +/// Read the `paths` configuration variable to discover all path overrides that +/// have been configured. +pub fn add_overrides<'a>( + registry: &mut PackageRegistry<'a>, + ws: &Workspace<'a>, +) -> CargoResult<()> { + let paths = match ws.config().get_list("paths")? { + Some(list) => list, + None => return Ok(()), + }; + + let paths = paths.val.iter().map(|&(ref s, ref p)| { + // The path listed next to the string is the config file in which the + // key was located, so we want to pop off the `.cargo/config` component + // to get the directory containing the `.cargo` folder. + (p.parent().unwrap().parent().unwrap().join(s), p) + }); + + for (path, definition) in paths { + let id = SourceId::for_path(&path)?; + let mut source = PathSource::new_recursive(&path, id, ws.config()); + source.update().chain_err(|| { + format!( + "failed to update path override `{}` \ + (defined in `{}`)", + path.display(), + definition.display() + ) + })?; + registry.add_override(Box::new(source)); + } + Ok(()) +} + +pub fn get_resolved_packages<'a>( + resolve: &Resolve, + registry: PackageRegistry<'a>, +) -> CargoResult> { + let ids: Vec = resolve.iter().collect(); + registry.get(&ids) +} + +/// In this function we're responsible for informing the `registry` of all +/// locked dependencies from the previous lock file we had, `resolve`. +/// +/// This gets particularly tricky for a couple of reasons. The first is that we +/// want all updates to be conservative, so we actually want to take the +/// `resolve` into account (and avoid unnecessary registry updates and such). +/// the second, however, is that we want to be resilient to updates of +/// manifests. 
For example if a dependency is added or a version is changed we +/// want to make sure that we properly re-resolve (conservatively) instead of +/// providing an opaque error. +/// +/// The logic here is somewhat subtle, but there should be more comments below to +/// clarify things. +/// +/// Note that this function, at the time of this writing, is basically the +/// entire fix for issue #4127. +fn register_previous_locks( + ws: &Workspace<'_>, + registry: &mut PackageRegistry<'_>, + resolve: &Resolve, + keep: &dyn Fn(&PackageId) -> bool, +) { + let path_pkg = |id: SourceId| { + if !id.is_path() { + return None; + } + if let Ok(path) = id.url().to_file_path() { + if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) { + return Some(pkg); + } } - None => summary, + None }; - let mut resolved = try!(resolver::resolve(&summary, &method, registry)); - match previous { - Some(r) => resolved.copy_metadata(r), - None => {} + // Ok so we've been passed in a `keep` function which basically says "if I + // return `true` then this package wasn't listed for an update on the command + // line". That is, if we run `cargo update -p foo` then `keep(bar)` will return + // `true`, whereas `keep(foo)` will return `false` (roughly speaking). + // + // This isn't actually quite what we want, however. Instead we want to + // further refine this `keep` function with *all transitive dependencies* of + // the packages we're not keeping. For example, consider a case like this: + // + // * There's a crate `log`. + // * There's a crate `serde` which depends on `log`. + // + // Let's say we then run `cargo update -p serde`. This may *also* want to + // update the `log` dependency as our newer version of `serde` may have a + // new minimum version required for `log`. Now this isn't always guaranteed + // to work. What'll happen here is we *won't* lock the `log` dependency nor + // the `log` crate itself, but we will inform the registry "please prefer + // this version of `log`". That way if our newer version of serde works with + // the older version of `log`, we conservatively won't update `log`. If, + // however, nothing else in the dependency graph depends on `log` and the + // newer version of `serde` requires a new version of `log` it'll get pulled + // in (as we didn't accidentally lock it to an old version). + // + // Additionally, here we process all path dependencies listed in the previous + // resolve. They can not only have their dependencies change but also + // the versions of the package change as well. If this ends up happening + // then we want to make sure we don't lock a package ID node that doesn't + // actually exist. Note that we don't do transitive visits of all the + // package's dependencies here as that'll be covered below to poison those + // if they changed. + let mut avoid_locking = HashSet::new(); + registry.add_to_yanked_whitelist(resolve.iter().filter(keep)); + for node in resolve.iter() { + if !keep(&node) { + add_deps(resolve, node, &mut avoid_locking); + } else if let Some(pkg) = path_pkg(node.source_id()) { + if pkg.package_id() != node { + avoid_locking.insert(node); + } + } + } + + // Ok, but the above loop isn't the entire story! Updates to the dependency + // graph can come from two locations, the `cargo update` command or + // manifests themselves. For example a manifest on the filesystem may + // have been updated to have an updated version requirement on `serde`. 
In + // this case both `keep(serde)` and `keep(log)` return `true` (the `keep` + // that's an argument to this function). We, however, don't want to keep + // either of those! Otherwise we'll get obscure resolve errors about locked + // versions. + // + // To solve this problem we iterate over all packages with path sources + // (aka ones with manifests that are changing) and take a look at all of + // their dependencies. If any dependency does not match something in the + // previous lock file, then we're guaranteed that the main resolver will + // update the source of this dependency no matter what. Knowing this we + // poison all packages from the same source, forcing them all to get + // updated. + // + // This may seem like a heavy hammer, and it is! It means that if you change + // anything from crates.io then all of crates.io becomes unlocked. Note, + // however, that we still want conservative updates. This currently happens + // because the first candidate the resolver picks is the previously locked + // version, and only if that fails to activate to we move on and try + // a different version. (giving the guise of conservative updates) + // + // For example let's say we had `serde = "0.1"` written in our lock file. + // When we later edit this to `serde = "0.1.3"` we don't want to lock serde + // at its old version, 0.1.1. Instead we want to allow it to update to + // `0.1.3` and update its own dependencies (like above). To do this *all + // crates from crates.io* are not locked (aka added to `avoid_locking`). + // For dependencies like `log` their previous version in the lock file will + // come up first before newer version, if newer version are available. + let mut path_deps = ws.members().cloned().collect::>(); + let mut visited = HashSet::new(); + while let Some(member) = path_deps.pop() { + if !visited.insert(member.package_id()) { + continue; + } + let is_ws_member = ws.is_member(&member); + for dep in member.dependencies() { + // If this dependency didn't match anything special then we may want + // to poison the source as it may have been added. If this path + // dependencies is **not** a workspace member, however, and it's an + // optional/non-transitive dependency then it won't be necessarily + // be in our lock file. If this shows up then we avoid poisoning + // this source as otherwise we'd repeatedly update the registry. + // + // TODO: this breaks adding an optional dependency in a + // non-workspace member and then simultaneously editing the + // dependency on that crate to enable the feature. For now, + // this bug is better than the always-updating registry though. + if !is_ws_member && (dep.is_optional() || !dep.is_transitive()) { + continue; + } + + // If this is a path dependency, then try to push it onto our + // worklist. + if let Some(pkg) = path_pkg(dep.source_id()) { + path_deps.push(pkg); + continue; + } + + // If we match *anything* in the dependency graph then we consider + // ourselves all ok, and assume that we'll resolve to that. + if resolve.iter().any(|id| dep.matches_ignoring_source(id)) { + continue; + } + + // Ok if nothing matches, then we poison the source of these + // dependencies and the previous lock file. 
+ debug!( + "poisoning {} because {} looks like it changed {}", + dep.source_id(), + member.package_id(), + dep.package_name() + ); + for id in resolve + .iter() + .filter(|id| id.source_id() == dep.source_id()) + { + add_deps(resolve, id, &mut avoid_locking); + } + } + } + + // Alright now that we've got our new, fresh, shiny, and refined `keep` + // function let's put it to action. Take a look at the previous lock file, + // filter everything by this callback, and then shove everything else into + // the registry as a locked dependency. + let keep = |id: &PackageId| keep(id) && !avoid_locking.contains(id); + + for node in resolve.iter().filter(keep) { + let deps = resolve + .deps_not_replaced(node) + .map(|p| p.0) + .filter(keep) + .collect(); + registry.register_lock(node, deps); } - return Ok(resolved); - - fn keep<'a>(p: &&'a PackageId, - to_avoid_packages: Option<&HashSet<&'a PackageId>>, - to_avoid_sources: &HashSet<&'a SourceId>) - -> bool { - !to_avoid_sources.contains(&p.source_id()) && match to_avoid_packages { - Some(set) => !set.contains(p), - None => true, + + /// Recursively add `node` and all its transitive dependencies to `set`. + fn add_deps(resolve: &Resolve, node: PackageId, set: &mut HashSet) { + if !set.insert(node) { + return; + } + debug!("ignoring any lock pointing directly at {}", node); + for (dep, _) in resolve.deps_not_replaced(node) { + add_deps(resolve, dep, set); } } } diff --git a/src/cargo/ops/vendor.rs b/src/cargo/ops/vendor.rs new file mode 100644 index 00000000000..39ee0044545 --- /dev/null +++ b/src/cargo/ops/vendor.rs @@ -0,0 +1,327 @@ +use crate::core::shell::Verbosity; +use crate::core::{GitReference, Workspace}; +use crate::ops; +use crate::sources::path::PathSource; +use crate::util::Sha256; +use crate::util::{paths, CargoResult, CargoResultExt, Config}; +use failure::bail; +use serde::Serialize; +use std::collections::HashSet; +use std::collections::{BTreeMap, BTreeSet, HashMap}; +use std::fs::{self, File}; +use std::io::Write; +use std::path::{Path, PathBuf}; + +pub struct VendorOptions<'a> { + pub no_delete: bool, + pub destination: &'a Path, + pub extra: Vec, +} + +pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> { + let mut extra_workspaces = Vec::new(); + for extra in opts.extra.iter() { + let extra = ws.config().cwd().join(extra); + let ws = Workspace::new(&extra, ws.config())?; + extra_workspaces.push(ws); + } + let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::>(); + let vendor_config = + sync(ws.config(), &workspaces, opts).chain_err(|| "failed to sync".to_string())?; + + let shell = ws.config().shell(); + if shell.verbosity() != Verbosity::Quiet { + eprint!("To use vendored sources, add this to your .cargo/config for this project:\n\n"); + print!("{}", &toml::to_string(&vendor_config).unwrap()); + } + + Ok(()) +} + +#[derive(Serialize)] +struct VendorConfig { + source: BTreeMap, +} + +#[derive(Serialize)] +#[serde(rename_all = "lowercase", untagged)] +enum VendorSource { + Directory { + directory: PathBuf, + }, + Registry { + registry: Option, + #[serde(rename = "replace-with")] + replace_with: String, + }, + Git { + git: String, + branch: Option, + tag: Option, + rev: Option, + #[serde(rename = "replace-with")] + replace_with: String, + }, +} + +fn sync( + config: &Config, + workspaces: &[&Workspace<'_>], + opts: &VendorOptions<'_>, +) -> CargoResult { + let canonical_destination = opts.destination.canonicalize(); + let canonical_destination = canonical_destination + .as_ref() + .map(|p| 
&**p) + .unwrap_or(opts.destination); + + fs::create_dir_all(&canonical_destination)?; + let mut to_remove = HashSet::new(); + if !opts.no_delete { + for entry in canonical_destination.read_dir()? { + let entry = entry?; + to_remove.insert(entry.path()); + } + } + + // First up attempt to work around rust-lang/cargo#5956. Apparently build + // artifacts sprout up in Cargo's global cache for whatever reason, although + // it's unsure what tool is causing these issues at this time. For now we + // apply a heavy-hammer approach which is to delete Cargo's unpacked version + // of each crate to start off with. After we do this we'll re-resolve and + // redownload again, which should trigger Cargo to re-extract all the + // crates. + // + // Note that errors are largely ignored here as this is a best-effort + // attempt. If anything fails here we basically just move on to the next + // crate to work with. + for ws in workspaces { + let (packages, resolve) = + ops::resolve_ws(ws).chain_err(|| "failed to load pkg lockfile")?; + + packages + .get_many(resolve.iter()) + .chain_err(|| "failed to download packages")?; + + for pkg in resolve.iter() { + // Don't delete actual source code! + if pkg.source_id().is_path() { + if let Ok(path) = pkg.source_id().url().to_file_path() { + if let Ok(path) = path.canonicalize() { + to_remove.remove(&path); + } + } + continue; + } + if pkg.source_id().is_git() { + continue; + } + if let Ok(pkg) = packages.get_one(pkg) { + drop(fs::remove_dir_all(pkg.manifest_path().parent().unwrap())); + } + } + } + + let mut checksums = HashMap::new(); + let mut ids = BTreeMap::new(); + + // Next up let's actually download all crates and start storing internal + // tables about them. + for ws in workspaces { + let (packages, resolve) = + ops::resolve_ws(ws).chain_err(|| "failed to load pkg lockfile")?; + + packages + .get_many(resolve.iter()) + .chain_err(|| "failed to download packages")?; + + for pkg in resolve.iter() { + // No need to vendor path crates since they're already in the + // repository + if pkg.source_id().is_path() { + continue; + } + ids.insert( + pkg, + packages + .get_one(pkg) + .chain_err(|| "failed to fetch package")? 
+ .clone(), + ); + + checksums.insert(pkg, resolve.checksums().get(&pkg).cloned()); + } + } + + let mut versions = HashMap::new(); + for id in ids.keys() { + let map = versions.entry(id.name()).or_insert_with(BTreeMap::default); + if let Some(prev) = map.get(&id.version()) { + bail!( + "found duplicate version of package `{} v{}` \ + vendored from two sources:\n\ + \n\ + \tsource 1: {}\n\ + \tsource 2: {}", + id.name(), + id.version(), + prev, + id.source_id() + ); + } + map.insert(id.version(), id.source_id()); + } + + let mut sources = BTreeSet::new(); + for (id, pkg) in ids.iter() { + // Next up, copy it to the vendor directory + let src = pkg + .manifest_path() + .parent() + .expect("manifest_path should point to a file"); + let max_version = *versions[&id.name()].iter().rev().next().unwrap().0; + let dir_has_version_suffix = id.version() != max_version; + let dst_name = if dir_has_version_suffix { + // Eg vendor/futures-0.1.13 + format!("{}-{}", id.name(), id.version()) + } else { + // Eg vendor/futures + id.name().to_string() + }; + + sources.insert(id.source_id()); + let dst = canonical_destination.join(&dst_name); + to_remove.remove(&dst); + let cksum = dst.join(".cargo-checksum.json"); + if dir_has_version_suffix && cksum.exists() { + // Always re-copy directory without version suffix in case the version changed + continue; + } + + config.shell().status( + "Vendoring", + &format!("{} ({}) to {}", id, src.to_string_lossy(), dst.display()), + )?; + + let _ = fs::remove_dir_all(&dst); + let pathsource = PathSource::new(src, id.source_id(), config); + let paths = pathsource.list_files(pkg)?; + let mut map = BTreeMap::new(); + cp_sources(src, &paths, &dst, &mut map) + .chain_err(|| format!("failed to copy over vendored sources for: {}", id))?; + + // Finally, emit the metadata about this package + let json = serde_json::json!({ + "package": checksums.get(id), + "files": map, + }); + + File::create(&cksum)?.write_all(json.to_string().as_bytes())?; + } + + for path in to_remove { + if path.is_dir() { + paths::remove_dir_all(&path)?; + } else { + paths::remove_file(&path)?; + } + } + + // add our vendored source + let mut config = BTreeMap::new(); + + let merged_source_name = "vendored-sources"; + config.insert( + merged_source_name.to_string(), + VendorSource::Directory { + directory: canonical_destination.to_path_buf(), + }, + ); + + // replace original sources with vendor + for source_id in sources { + let name = if source_id.is_default_registry() { + "crates-io".to_string() + } else { + source_id.url().to_string() + }; + + let source = if source_id.is_default_registry() { + VendorSource::Registry { + registry: None, + replace_with: merged_source_name.to_string(), + } + } else if source_id.is_git() { + let mut branch = None; + let mut tag = None; + let mut rev = None; + if let Some(reference) = source_id.git_reference() { + match *reference { + GitReference::Branch(ref b) => branch = Some(b.clone()), + GitReference::Tag(ref t) => tag = Some(t.clone()), + GitReference::Rev(ref r) => rev = Some(r.clone()), + } + } + VendorSource::Git { + git: source_id.url().to_string(), + branch, + tag, + rev, + replace_with: merged_source_name.to_string(), + } + } else { + panic!("Invalid source ID: {}", source_id) + }; + config.insert(name, source); + } + + Ok(VendorConfig { source: config }) +} + +fn cp_sources( + src: &Path, + paths: &[PathBuf], + dst: &Path, + cksums: &mut BTreeMap, +) -> CargoResult<()> { + for p in paths { + let relative = p.strip_prefix(&src).unwrap(); + + match 
relative.to_str() { + // Skip git config files as they're not relevant to builds most of + // the time and if we respect them (e.g. in git) then it'll + // probably mess with the checksums when a vendor dir is checked + // into someone else's source control + Some(".gitattributes") | Some(".gitignore") | Some(".git") => continue, + + // Temporary Cargo files + Some(".cargo-ok") => continue, + + // Skip patch-style orig/rej files. Published crates on crates.io + // have `Cargo.toml.orig` which we don't want to use here and + // otherwise these are rarely used as part of the build process. + Some(filename) => { + if filename.ends_with(".orig") || filename.ends_with(".rej") { + continue; + } + } + _ => {} + }; + + // Join pathname components individually to make sure that the joined + // path uses the correct directory separators everywhere, since + // `relative` may use Unix-style and `dst` may require Windows-style + // backslashes. + let dst = relative + .iter() + .fold(dst.to_owned(), |acc, component| acc.join(&component)); + + fs::create_dir_all(dst.parent().unwrap())?; + + fs::copy(&p, &dst) + .chain_err(|| format!("failed to copy `{}` to `{}`", p.display(), dst.display()))?; + let cksum = Sha256::new().update_path(dst)?.finish_hex(); + cksums.insert(relative.to_str().unwrap().replace("\\", "/"), cksum); + } + Ok(()) +} diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs new file mode 100644 index 00000000000..90e5465ce16 --- /dev/null +++ b/src/cargo/sources/config.rs @@ -0,0 +1,256 @@ +//! Implementation of configuration for various sources +//! +//! This module will parse the various `source.*` TOML configuration keys into a +//! structure usable by Cargo itself. Currently this is primarily used to map +//! sources to one another via the `replace-with` key in `.cargo/config`. + +use std::collections::{HashMap, HashSet}; +use std::path::{Path, PathBuf}; + +use log::debug; +use url::Url; + +use crate::core::{GitReference, PackageId, Source, SourceId}; +use crate::sources::{ReplacedSource, CRATES_IO_REGISTRY}; +use crate::util::config::ConfigValue; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::{Config, IntoUrl}; + +#[derive(Clone)] +pub struct SourceConfigMap<'cfg> { + cfgs: HashMap, + id2name: HashMap, + config: &'cfg Config, +} + +/// Configuration for a particular source, found in TOML looking like: +/// +/// ```toml +/// [source.crates-io] +/// registry = 'https://github.com/rust-lang/crates.io-index' +/// replace-with = 'foo' # optional +/// ``` +#[derive(Clone)] +struct SourceConfig { + // id this source corresponds to, inferred from the various defined keys in + // the configuration + id: SourceId, + + // Name of the source that this source should be replaced with. This field + // is a tuple of (name, path) where path is where this configuration key was + // defined (the literal `.cargo/config` file). + replace_with: Option<(String, PathBuf)>, +} + +impl<'cfg> SourceConfigMap<'cfg> { + pub fn new(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap::empty(config)?; + if let Some(table) = config.get_table("source")? 
{ + for (key, value) in table.val.iter() { + base.add_config(key, value)?; + } + } + Ok(base) + } + + pub fn empty(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap { + cfgs: HashMap::new(), + id2name: HashMap::new(), + config, + }; + base.add( + CRATES_IO_REGISTRY, + SourceConfig { + id: SourceId::crates_io(config)?, + replace_with: None, + }, + ); + Ok(base) + } + + pub fn config(&self) -> &'cfg Config { + self.config + } + + pub fn load( + &self, + id: SourceId, + yanked_whitelist: &HashSet, + ) -> CargoResult> { + debug!("loading: {}", id); + let mut name = match self.id2name.get(&id) { + Some(name) => name, + None => return Ok(id.load(self.config, yanked_whitelist)?), + }; + let mut path = Path::new("/"); + let orig_name = name; + let new_id; + loop { + let cfg = match self.cfgs.get(name) { + Some(cfg) => cfg, + None => failure::bail!( + "could not find a configured source with the \ + name `{}` when attempting to lookup `{}` \ + (configuration in `{}`)", + name, + orig_name, + path.display() + ), + }; + match cfg.replace_with { + Some((ref s, ref p)) => { + name = s; + path = p; + } + None if id == cfg.id => return Ok(id.load(self.config, yanked_whitelist)?), + None => { + new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string())); + break; + } + } + debug!("following pointer to {}", name); + if name == orig_name { + failure::bail!( + "detected a cycle of `replace-with` sources, the source \ + `{}` is eventually replaced with itself \ + (configuration in `{}`)", + name, + path.display() + ) + } + } + + let new_src = new_id.load( + self.config, + &yanked_whitelist + .iter() + .map(|p| p.map_source(id, new_id)) + .collect(), + )?; + let old_src = id.load(self.config, yanked_whitelist)?; + if !new_src.supports_checksums() && old_src.supports_checksums() { + failure::bail!( + "\ +cannot replace `{orig}` with `{name}`, the source `{orig}` supports \ +checksums, but `{name}` does not + +a lock file compatible with `{orig}` cannot be generated in this situation +", + orig = orig_name, + name = name + ); + } + + if old_src.requires_precise() && id.precise().is_none() { + failure::bail!( + "\ +the source {orig} requires a lock file to be present first before it can be +used against vendored source code + +remove the source replacement configuration, generate a lock file, and then +restore the source replacement configuration to continue the build +", + orig = orig_name + ); + } + + Ok(Box::new(ReplacedSource::new(id, new_id, new_src))) + } + + fn add(&mut self, name: &str, cfg: SourceConfig) { + self.id2name.insert(cfg.id, name.to_string()); + self.cfgs.insert(name.to_string(), cfg); + } + + fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> { + let (table, _path) = cfg.table(&format!("source.{}", name))?; + let mut srcs = Vec::new(); + if let Some(val) = table.get("registry") { + let url = url(val, &format!("source.{}.registry", name))?; + srcs.push(SourceId::for_registry(&url)?); + } + if let Some(val) = table.get("local-registry") { + let (s, path) = val.string(&format!("source.{}.local-registry", name))?; + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(SourceId::for_local_registry(&path)?); + } + if let Some(val) = table.get("directory") { + let (s, path) = val.string(&format!("source.{}.directory", name))?; + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(SourceId::for_directory(&path)?); + } + if let Some(val) = table.get("git") { + let url = 
url(val, &format!("source.{}.git", name))?; + let r#try = |s: &str| { + let val = match table.get(s) { + Some(s) => s, + None => return Ok(None), + }; + let key = format!("source.{}.{}", name, s); + val.string(&key).map(Some) + }; + let reference = match r#try("branch")? { + Some(b) => GitReference::Branch(b.0.to_string()), + None => match r#try("tag")? { + Some(b) => GitReference::Tag(b.0.to_string()), + None => match r#try("rev")? { + Some(b) => GitReference::Rev(b.0.to_string()), + None => GitReference::Branch("master".to_string()), + }, + }, + }; + srcs.push(SourceId::for_git(&url, reference)?); + } + if name == "crates-io" && srcs.is_empty() { + srcs.push(SourceId::crates_io(self.config)?); + } + + let mut srcs = srcs.into_iter(); + let src = srcs.next().ok_or_else(|| { + failure::format_err!( + "no source URL specified for `source.{}`, need \ + either `registry` or `local-registry` defined", + name + ) + })?; + if srcs.next().is_some() { + failure::bail!("more than one source URL specified for `source.{}`", name) + } + + let mut replace_with = None; + if let Some(val) = table.get("replace-with") { + let (s, path) = val.string(&format!("source.{}.replace-with", name))?; + replace_with = Some((s.to_string(), path.to_path_buf())); + } + + self.add( + name, + SourceConfig { + id: src, + replace_with, + }, + ); + + return Ok(()); + + fn url(cfg: &ConfigValue, key: &str) -> CargoResult { + let (url, path) = cfg.string(key)?; + let url = url.into_url().chain_err(|| { + format!( + "configuration key `{}` specified an invalid \ + URL (in {})", + key, + path.display() + ) + })?; + Ok(url) + } + } +} diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs new file mode 100644 index 00000000000..fa27ecea0bb --- /dev/null +++ b/src/cargo/sources/directory.rs @@ -0,0 +1,207 @@ +use std::collections::HashMap; +use std::fmt::{self, Debug, Formatter}; +use std::path::{Path, PathBuf}; + +use serde::Deserialize; + +use crate::core::source::MaybePackage; +use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; +use crate::sources::PathSource; +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::paths; +use crate::util::{Config, Sha256}; + +pub struct DirectorySource<'cfg> { + source_id: SourceId, + root: PathBuf, + packages: HashMap, + config: &'cfg Config, +} + +#[derive(Deserialize)] +struct Checksum { + package: Option, + files: HashMap, +} + +impl<'cfg> DirectorySource<'cfg> { + pub fn new(path: &Path, id: SourceId, config: &'cfg Config) -> DirectorySource<'cfg> { + DirectorySource { + source_id: id, + root: path.to_path_buf(), + config, + packages: HashMap::new(), + } + } +} + +impl<'cfg> Debug for DirectorySource<'cfg> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "DirectorySource {{ root: {:?} }}", self.root) + } +} + +impl<'cfg> Source for DirectorySource<'cfg> { + fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { + let packages = self.packages.values().map(|p| &p.0); + let matches = packages.filter(|pkg| dep.matches(pkg.summary())); + for summary in matches.map(|pkg| pkg.summary().clone()) { + f(summary); + } + Ok(()) + } + + fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { + let packages = self.packages.values().map(|p| &p.0); + for summary in packages.map(|pkg| pkg.summary().clone()) { + f(summary); + } + Ok(()) + } + + fn supports_checksums(&self) -> bool { + true + } + + fn requires_precise(&self) -> bool { + true + } + + 
diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs
new file mode 100644
index 00000000000..fa27ecea0bb
--- /dev/null
+++ b/src/cargo/sources/directory.rs
@@ -0,0 +1,207 @@
+use std::collections::HashMap;
+use std::fmt::{self, Debug, Formatter};
+use std::path::{Path, PathBuf};
+
+use serde::Deserialize;
+
+use crate::core::source::MaybePackage;
+use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary};
+use crate::sources::PathSource;
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::paths;
+use crate::util::{Config, Sha256};
+
+pub struct DirectorySource<'cfg> {
+    source_id: SourceId,
+    root: PathBuf,
+    packages: HashMap<PackageId, (Package, Checksum)>,
+    config: &'cfg Config,
+}
+
+#[derive(Deserialize)]
+struct Checksum {
+    package: Option<String>,
+    files: HashMap<String, String>,
+}
+
+impl<'cfg> DirectorySource<'cfg> {
+    pub fn new(path: &Path, id: SourceId, config: &'cfg Config) -> DirectorySource<'cfg> {
+        DirectorySource {
+            source_id: id,
+            root: path.to_path_buf(),
+            config,
+            packages: HashMap::new(),
+        }
+    }
+}
+
+impl<'cfg> Debug for DirectorySource<'cfg> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        write!(f, "DirectorySource {{ root: {:?} }}", self.root)
+    }
+}
+
+impl<'cfg> Source for DirectorySource<'cfg> {
+    fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        let packages = self.packages.values().map(|p| &p.0);
+        let matches = packages.filter(|pkg| dep.matches(pkg.summary()));
+        for summary in matches.map(|pkg| pkg.summary().clone()) {
+            f(summary);
+        }
+        Ok(())
+    }
+
+    fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        let packages = self.packages.values().map(|p| &p.0);
+        for summary in packages.map(|pkg| pkg.summary().clone()) {
+            f(summary);
+        }
+        Ok(())
+    }
+
+    fn supports_checksums(&self) -> bool {
+        true
+    }
+
+    fn requires_precise(&self) -> bool {
+        true
+    }
+
+    fn source_id(&self) -> SourceId {
+        self.source_id
+    }
+
+    fn update(&mut self) -> CargoResult<()> {
+        self.packages.clear();
+        let entries = self.root.read_dir().chain_err(|| {
+            format!(
+                "failed to read root of directory source: {}",
+                self.root.display()
+            )
+        })?;
+
+        for entry in entries {
+            let entry = entry?;
+            let path = entry.path();
+
+            // Ignore hidden/dot directories as they typically don't contain
+            // crates and otherwise may conflict with a VCS
+            // (rust-lang/cargo#3414).
+            if let Some(s) = path.file_name().and_then(|s| s.to_str()) {
+                if s.starts_with('.') {
+                    continue;
+                }
+            }
+
+            // Vendor directories are often checked into a VCS, but throughout
+            // the lifetime of a vendor dir crates are often added and deleted.
+            // Some VCS implementations don't always fully delete the directory
+            // when a dir is removed from a different checkout. Sometimes a
+            // mostly-empty dir is left behind.
+            //
+            // Additionally vendor directories are sometimes accompanied with
+            // readme files and other auxiliary information not too interesting
+            // to Cargo.
+            //
+            // To help handle all this we only try processing folders with a
+            // `Cargo.toml` in them. This has the upside of being pretty
+            // flexible with the contents of vendor directories but has the
+            // downside of accidentally misconfigured vendor directories
+            // silently returning fewer crates.
+            if !path.join("Cargo.toml").exists() {
+                continue;
+            }
+
+            let mut src = PathSource::new(&path, self.source_id, self.config);
+            src.update()?;
+            let mut pkg = src.root_package()?;
+
+            let cksum_file = path.join(".cargo-checksum.json");
+            let cksum = paths::read(&path.join(cksum_file)).chain_err(|| {
+                format!(
+                    "failed to load checksum `.cargo-checksum.json` \
+                     of {} v{}",
+                    pkg.package_id().name(),
+                    pkg.package_id().version()
+                )
+            })?;
+            let cksum: Checksum = serde_json::from_str(&cksum).chain_err(|| {
+                format!(
+                    "failed to decode `.cargo-checksum.json` of \
+                     {} v{}",
+                    pkg.package_id().name(),
+                    pkg.package_id().version()
+                )
+            })?;
+
+            if let Some(package) = &cksum.package {
+                pkg.manifest_mut()
+                    .summary_mut()
+                    .set_checksum(package.clone());
+            }
+            self.packages.insert(pkg.package_id(), (pkg, cksum));
+        }
+
+        Ok(())
+    }
+
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+        self.packages
+            .get(&id)
+            .map(|p| &p.0)
+            .cloned()
+            .map(MaybePackage::Ready)
+            .ok_or_else(|| failure::format_err!("failed to find package with id: {}", id))
+    }
+
+    fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+        panic!("no downloads to do")
+    }
+
+    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+        Ok(pkg.package_id().version().to_string())
+    }
+
+    fn verify(&self, id: PackageId) -> CargoResult<()> {
+        let (pkg, cksum) = match self.packages.get(&id) {
+            Some(&(ref pkg, ref cksum)) => (pkg, cksum),
+            None => failure::bail!("failed to find entry for `{}` in directory source", id),
+        };
+
+        for (file, cksum) in cksum.files.iter() {
+            let file = pkg.root().join(file);
+            let actual = Sha256::new()
+                .update_path(&file)
+                .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?
+                .finish_hex();
+            if &*actual != cksum {
+                failure::bail!(
+                    "the listed checksum of `{}` has changed:\n\
+                     expected: {}\n\
+                     actual:   {}\n\
+                     \n\
+                     directory sources are not intended to be edited, if \
+                     modifications are required then it is recommended \
+                     that [replace] is used with a forked copy of the \
+                     source\
+                     ",
+                    file.display(),
+                    cksum,
+                    actual
+                );
+            }
+        }
+
+        Ok(())
+    }
+
+    fn describe(&self) -> String {
+        format!("directory source `{}`", self.root.display())
+    }
+
+    fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {}
+
+    fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult<bool> {
+        Ok(false)
+    }
+}
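For reference, a sketch of the `.cargo-checksum.json` shape that `update` deserializes into the `Checksum` struct above. This assumes `serde` (with the `derive` feature) and `serde_json` as dependencies, and the hash values are invented placeholders:

```rust
use std::collections::HashMap;

use serde::Deserialize;

// Mirrors the `Checksum` struct above: an optional whole-package
// checksum plus per-file SHA-256 hex digests.
#[derive(Deserialize)]
struct Checksum {
    package: Option<String>,
    files: HashMap<String, String>,
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{
        "package": "c0ffee00...",
        "files": {
            "Cargo.toml": "deadbeef...",
            "src/lib.rs": "0123abcd..."
        }
    }"#;
    let cksum: Checksum = serde_json::from_str(raw)?;
    assert_eq!(cksum.files.len(), 2);
    println!("package checksum: {:?}", cksum.package);
    Ok(())
}
```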
diff --git a/src/cargo/sources/git/mod.rs b/src/cargo/sources/git/mod.rs
index 0ef4db4d668..86d0094d19e 100644
--- a/src/cargo/sources/git/mod.rs
+++ b/src/cargo/sources/git/mod.rs
@@ -1,4 +1,4 @@
-pub use self::utils::{GitRemote, GitDatabase, GitCheckout, GitRevision, fetch};
-pub use self::source::{GitSource, canonicalize_url};
-mod utils;
+pub use self::source::{canonicalize_url, GitSource};
+pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote, GitRevision};
 mod source;
+mod utils;
diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs
index 1863e8fd237..e417da9cb01 100644
--- a/src/cargo/sources/git/source.rs
+++ b/src/cargo/sources/git/source.rs
@@ -1,258 +1,296 @@
 use std::fmt::{self, Debug, Formatter};
-use std::hash::{Hash, Hasher, SipHasher};
-use std::mem;
-use std::path::PathBuf;
-use url::{self, Url};
+use log::trace;
+use url::Url;
 
-use core::source::{Source, SourceId};
-use core::GitReference;
-use core::{Package, PackageId, Summary, Registry, Dependency};
-use util::{CargoResult, Config, to_hex};
-use sources::PathSource;
-use sources::git::utils::{GitRemote, GitRevision};
+use crate::core::source::{MaybePackage, Source, SourceId};
+use crate::core::GitReference;
+use crate::core::{Dependency, Package, PackageId, Summary};
+use crate::sources::git::utils::{GitRemote, GitRevision};
+use crate::sources::PathSource;
+use crate::util::errors::CargoResult;
+use crate::util::hex::short_hash;
+use crate::util::Config;
 
-/* TODO: Refactor GitSource to delegate to a PathSource
- */
 pub struct GitSource<'cfg> {
     remote: GitRemote,
     reference: GitReference,
-    db_path: PathBuf,
-    checkout_path: PathBuf,
     source_id: SourceId,
     path_source: Option<PathSource<'cfg>>,
     rev: Option<GitRevision>,
+    ident: String,
     config: &'cfg Config,
 }
 
 impl<'cfg> GitSource<'cfg> {
-    pub fn new(source_id: &SourceId,
-               config: &'cfg Config) -> GitSource<'cfg> {
+    pub fn new(source_id: SourceId, config: &'cfg Config) -> CargoResult<GitSource<'cfg>> {
         assert!(source_id.is_git(), "id is not git, id={}", source_id);
 
-        let reference = match source_id.git_reference() {
-            Some(reference) => reference,
-            None => panic!("Not a git source; id={}", source_id),
-        };
-
         let remote = GitRemote::new(source_id.url());
-        let ident = ident(source_id.url());
-
-        let db_path = config.git_db_path().join(&ident);
-
-        let reference_path = match *reference {
-            GitReference::Branch(ref s) |
-            GitReference::Tag(ref s) |
-            GitReference::Rev(ref s) => s.to_string(),
-        };
-        let checkout_path = config.git_checkout_path()
-                                  .join(&ident)
-                                  .join(&reference_path);
+        let ident = ident(source_id.url())?;
 
         let reference = match source_id.precise() {
             Some(s) => GitReference::Rev(s.to_string()),
             None => source_id.git_reference().unwrap().clone(),
         };
 
-        GitSource {
-            remote: remote,
-            reference: reference,
-            db_path: db_path,
-            checkout_path: checkout_path,
-            source_id: source_id.clone(),
+        let source = GitSource {
+            remote,
+            reference,
+            source_id,
             path_source: None,
             rev: None,
-            config: config,
-        }
+            ident,
+            config,
+        };
+
+        Ok(source)
     }
 
-    pub fn url(&self) -> &Url { self.remote.url() }
-}
+    pub fn url(&self) -> &Url {
+        self.remote.url()
+    }
 
-fn ident(url: &Url) -> String {
-    let mut hasher = SipHasher::new_with_keys(0,0);
+    pub fn read_packages(&mut self) -> CargoResult<Vec<Package>> {
+        if self.path_source.is_none() {
+            self.update()?;
+        }
+        self.path_source.as_mut().unwrap().read_packages()
+    }
+}
 
-    // FIXME: this really should be able to not use to_str() everywhere, but the
-    // compiler seems to currently ask for static lifetimes spuriously.
-    // Perhaps related to rust-lang/rust#15144
-    let url = canonicalize_url(url);
-    let ident = url.path().unwrap_or(&[])
-                   .last().map(|a| a.clone()).unwrap_or(String::new());
+fn ident(url: &Url) -> CargoResult<String> {
+    let url = canonicalize_url(url)?;
+    let ident = url
+        .path_segments()
+        .and_then(|mut s| s.next_back())
+        .unwrap_or("");
 
-    let ident = if ident == "" {
-        "_empty".to_string()
-    } else {
-        ident
-    };
+    let ident = if ident == "" { "_empty" } else { ident };
 
-    url.hash(&mut hasher);
-    format!("{}-{}", ident, to_hex(hasher.finish()))
+    Ok(format!("{}-{}", ident, short_hash(&url)))
 }
 
-// Some hacks and heuristics for making equivalent URLs hash the same
-pub fn canonicalize_url(url: &Url) -> Url {
+// Some hacks and heuristics for making equivalent URLs hash the same.
+pub fn canonicalize_url(url: &Url) -> CargoResult<Url> {
     let mut url = url.clone();
 
-    // Strip a trailing slash
-    match url.scheme_data {
-        url::SchemeData::Relative(ref mut rel) => {
-            if rel.path.last().map(|s| s.is_empty()).unwrap_or(false) {
-                rel.path.pop();
-            }
-        }
-        _ => {}
+    // cannot-be-a-base-urls (e.g., `github.com:rust-lang-nursery/rustfmt.git`)
+    // are not supported.
+    if url.cannot_be_a_base() {
+        failure::bail!(
+            "invalid url `{}`: cannot-be-a-base-URLs are not supported",
+            url
+        )
+    }
+
+    // Strip a trailing slash.
+    if url.path().ends_with('/') {
+        url.path_segments_mut().unwrap().pop_if_empty();
     }
 
-    // HACKHACK: For github URL's specifically just lowercase
-    // everything. GitHub treats both the same, but they hash
+    // HACK: for GitHub URLs specifically, just lower-case
+    // everything. GitHub treats both the same, but they hash
     // differently, and we're gonna be hashing them. This wants a more
     // general solution, and also we're almost certainly not using the
-    // same case conversion rules that GitHub does. (#84)
-    if url.domain() == Some("github.com") {
-        url.scheme = "https".to_string();
-        match url.scheme_data {
-            url::SchemeData::Relative(ref mut rel) => {
-                rel.port = Some(443);
-                rel.default_port = Some(443);
-                let path = mem::replace(&mut rel.path, Vec::new());
-                rel.path = path.into_iter().map(|s| {
-                    s.chars().flat_map(|c| c.to_lowercase()).collect()
-                }).collect();
-            }
-            _ => {}
-        }
+    // same case conversion rules that GitHub does. (See issue #84.)
+    if url.host_str() == Some("github.com") {
+        url.set_scheme("https").unwrap();
+        let path = url.path().to_lowercase();
+        url.set_path(&path);
     }
 
-    // Repos generally can be accessed with or w/o '.git'
-    match url.scheme_data {
-        url::SchemeData::Relative(ref mut rel) => {
-            let needs_chopping = {
-                let last = rel.path.last().map(|s| &s[..]).unwrap_or("");
-                last.ends_with(".git")
-            };
-            if needs_chopping {
-                let last = rel.path.pop().unwrap();
-                rel.path.push(last[..last.len() - 4].to_string())
-            }
-        }
-        _ => {}
+    // Repos can generally be accessed with or without `.git` extension.
+    let needs_chopping = url.path().ends_with(".git");
+    if needs_chopping {
+        let last = {
+            let last = url.path_segments().unwrap().next_back().unwrap();
+            last[..last.len() - 4].to_owned()
+        };
+        url.path_segments_mut().unwrap().pop().push(&last);
     }
 
-    return url;
+    Ok(url)
 }
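The same normalization rules, written as a small standalone program against the `url` crate (the only dependency here); error handling is simplified to `unwrap`/`Option`, and the input URLs are invented:

```rust
use url::Url;

// Normalize a URL the way `canonicalize_url` above does: reject
// cannot-be-a-base URLs, strip a trailing slash, lower-case
// github.com paths, and chop a trailing `.git`.
fn canonicalize(mut url: Url) -> Option<Url> {
    if url.cannot_be_a_base() {
        return None;
    }
    if url.path().ends_with('/') {
        url.path_segments_mut().unwrap().pop_if_empty();
    }
    if url.host_str() == Some("github.com") {
        url.set_scheme("https").unwrap();
        let path = url.path().to_lowercase();
        url.set_path(&path);
    }
    if url.path().ends_with(".git") {
        let last = {
            let last = url.path_segments().unwrap().next_back().unwrap();
            last[..last.len() - 4].to_owned()
        };
        url.path_segments_mut().unwrap().pop().push(&last);
    }
    Some(url)
}

fn main() {
    let a = canonicalize("https://github.com/Rust-Lang/Cargo.git".parse().unwrap());
    let b = canonicalize("https://github.com/rust-lang/cargo/".parse().unwrap());
    // All four spellings hash to the same canonical form.
    assert_eq!(a, b);
}
```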
 
 impl<'cfg> Debug for GitSource<'cfg> {
-    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        try!(write!(f, "git repo at {}", self.remote.url()));
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        write!(f, "git repo at {}", self.remote.url())?;
 
-        match self.reference.to_ref_string() {
+        match self.reference.pretty_ref() {
             Some(s) => write!(f, " ({})", s),
-            None => Ok(())
+            None => Ok(()),
         }
     }
 }
 
-impl<'cfg> Registry for GitSource<'cfg> {
-    fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
-        let src = self.path_source.as_mut()
-                      .expect("BUG: update() must be called before query()");
-        src.query(dep)
+impl<'cfg> Source for GitSource<'cfg> {
+    fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        let src = self
+            .path_source
+            .as_mut()
+            .expect("BUG: `update()` must be called before `query()`");
+        src.query(dep, f)
+    }
+
+    fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        let src = self
+            .path_source
+            .as_mut()
+            .expect("BUG: `update()` must be called before `query()`");
+        src.fuzzy_query(dep, f)
+    }
+
+    fn supports_checksums(&self) -> bool {
+        false
+    }
+
+    fn requires_precise(&self) -> bool {
+        true
+    }
+
+    fn source_id(&self) -> SourceId {
+        self.source_id
     }
-}
 
-impl<'cfg> Source for GitSource<'cfg> {
     fn update(&mut self) -> CargoResult<()> {
-        let actual_rev = self.remote.rev_for(&self.db_path, &self.reference);
-        let should_update = actual_rev.is_err() ||
-                            self.source_id.precise().is_none();
+        let git_path = self.config.git_path();
+        let git_path = self.config.assert_package_cache_locked(&git_path);
+        let db_path = git_path.join("db").join(&self.ident);
+
+        if self.config.offline() && !db_path.exists() {
+            failure::bail!(
+                "can't checkout from '{}': you are in the offline mode (--offline)",
+                self.remote.url()
+            );
+        }
+
+        // Resolve our reference to an actual revision, and check if the
+        // database already has that revision. If it does, we just load a
+        // database pinned at that revision, and if we don't we issue an update
+        // to try to find the revision.
+        let actual_rev = self.remote.rev_for(&db_path, &self.reference);
+        let should_update = actual_rev.is_err() || self.source_id.precise().is_none();
 
-        let (repo, actual_rev) = if should_update {
-            try!(self.config.shell().status("Updating",
-                format!("git repository `{}`", self.remote.url())));
+        let (db, actual_rev) = if should_update && !self.config.offline() {
+            self.config.shell().status(
+                "Updating",
+                format!("git repository `{}`", self.remote.url()),
+            )?;
 
             trace!("updating git source `{:?}`", self.remote);
-            let repo = try!(self.remote.checkout(&self.db_path));
-            let rev = try!(repo.rev_for(&self.reference));
-            (repo, rev)
+
+            self.remote
+                .checkout(&db_path, &self.reference, self.config)?
         } else {
-            (try!(self.remote.db_at(&self.db_path)), actual_rev.unwrap())
+            (self.remote.db_at(&db_path)?, actual_rev.unwrap())
         };
 
-        try!(repo.copy_to(actual_rev.clone(), &self.checkout_path));
+        // Don’t use the full hash, in order to contribute less to reaching the
+        // path length limit on Windows. See
+        // <https://github.com/servo/servo/pull/14397>.
+        let short_id = db.to_short_id(&actual_rev).unwrap();
+
+        let checkout_path = git_path
+            .join("checkouts")
+            .join(&self.ident)
+            .join(short_id.as_str());
+
+        // Copy the database to the checkout location.
+        db.copy_to(actual_rev.clone(), &checkout_path, self.config)?;
 
         let source_id = self.source_id.with_precise(Some(actual_rev.to_string()));
-        let path_source = PathSource::new(&self.checkout_path, &source_id,
-                                          self.config);
+        let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config);
 
         self.path_source = Some(path_source);
         self.rev = Some(actual_rev);
         self.path_source.as_mut().unwrap().update()
     }
 
-    fn download(&mut self, _: &[PackageId]) -> CargoResult<()> {
-        // TODO: assert! that the PackageId is contained by the source
-        Ok(())
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+        trace!(
+            "getting packages for package ID `{}` from `{:?}`",
+            id,
+            self.remote
+        );
+        self.path_source
+            .as_mut()
+            .expect("BUG: `update()` must be called before `get()`")
+            .download(id)
     }
 
-    fn get(&self, ids: &[PackageId]) -> CargoResult<Vec<Package>> {
-        trace!("getting packages for package ids `{:?}` from `{:?}`", ids,
-               self.remote);
-        self.path_source.as_ref().expect("BUG: update() must be called \
-                                          before get()").get(ids)
+    fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+        panic!("no download should have started")
     }
 
     fn fingerprint(&self, _pkg: &Package) -> CargoResult<String> {
         Ok(self.rev.as_ref().unwrap().to_string())
     }
+
+    fn describe(&self) -> String {
+        format!("Git repository {}", self.source_id)
+    }
+
+    fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {}
+
+    fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult<bool> {
+        Ok(false)
+    }
 }
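Concretely, `update` lays the cache out under the configured git path as sketched below; the ident and short id values are invented examples:

```rust
use std::path::PathBuf;

fn main() {
    let git_path = PathBuf::from("/home/user/.cargo/git");
    let ident = "cargo-8f6afbe11e1b5b85"; // `<last path segment>-<short hash>`
    let short_id = "30fd77d"; // abbreviated object id of the resolved revision

    // Bare database shared by every checkout of this repository.
    let db_path = git_path.join("db").join(ident);
    // One working copy per revision, keyed by the short id to keep
    // paths short on Windows.
    let checkout_path = git_path.join("checkouts").join(ident).join(short_id);

    println!("{}\n{}", db_path.display(), checkout_path.display());
}
```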
 
 #[cfg(test)]
 mod test {
-    use url::Url;
     use super::ident;
-    use util::ToUrl;
+    use crate::util::IntoUrl;
+    use url::Url;
 
     #[test]
     pub fn test_url_to_path_ident_with_path() {
-        let ident = ident(&url("https://github.com/carlhuda/cargo"));
+        let ident = ident(&url("https://github.com/carlhuda/cargo")).unwrap();
         assert!(ident.starts_with("cargo-"));
     }
 
     #[test]
     pub fn test_url_to_path_ident_without_path() {
-        let ident = ident(&url("https://github.com"));
+        let ident = ident(&url("https://github.com")).unwrap();
        assert!(ident.starts_with("_empty-"));
     }
 
     #[test]
     fn test_canonicalize_idents_by_stripping_trailing_url_slash() {
-        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston/"));
-        let ident2 = ident(&url("https://github.com/PistonDevelopers/piston"));
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston/")).unwrap();
+        let ident2 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
         assert_eq!(ident1, ident2);
     }
 
     #[test]
     fn test_canonicalize_idents_by_lowercasing_github_urls() {
-        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston"));
-        let ident2 = ident(&url("https://github.com/pistondevelopers/piston"));
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
+        let ident2 = ident(&url("https://github.com/pistondevelopers/piston")).unwrap();
         assert_eq!(ident1, ident2);
     }
 
     #[test]
     fn test_canonicalize_idents_by_stripping_dot_git() {
-        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston"));
-        let ident2 = ident(&url("https://github.com/PistonDevelopers/piston.git"));
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
+        let ident2 = ident(&url("https://github.com/PistonDevelopers/piston.git")).unwrap();
         assert_eq!(ident1, ident2);
     }
 
     #[test]
-    fn test_canonicalize_idents_different_protocls() {
-        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston"));
-        let ident2 = ident(&url("git://github.com/PistonDevelopers/piston"));
+    fn test_canonicalize_idents_different_protocols() {
+        let ident1 = ident(&url("https://github.com/PistonDevelopers/piston")).unwrap();
+        let ident2 = ident(&url("git://github.com/PistonDevelopers/piston")).unwrap();
         assert_eq!(ident1, ident2);
     }
 
+    #[test]
+    fn test_canonicalize_cannot_be_a_base_urls() {
+        assert!(ident(&url("github.com:PistonDevelopers/piston")).is_err());
+        assert!(ident(&url("google.com:PistonDevelopers/piston")).is_err());
+    }
+
     fn url(s: &str) -> Url {
-        s.to_url().unwrap()
+        s.into_url().unwrap()
     }
 }
diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs
index 51f69a6d27e..5ad3edea79c 100644
--- a/src/cargo/sources/git/utils.rs
+++ b/src/cargo/sources/git/utils.rs
@@ -1,96 +1,84 @@
+use std::env;
 use std::fmt;
+use std::fs::{self, File};
+use std::mem;
 use std::path::{Path, PathBuf};
-use std::fs;
+use std::process::Command;
 
-use rustc_serialize::{Encodable, Encoder};
-use url::Url;
+use curl::easy::{Easy, List};
 use git2::{self, ObjectType};
+use log::{debug, info};
+use serde::ser;
+use serde::Serialize;
+use url::Url;
 
-use core::GitReference;
-use util::{CargoResult, ChainError, human, ToUrl, internal};
+use crate::core::GitReference;
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::paths;
+use crate::util::process_builder::process;
+use crate::util::{internal, network, Config, IntoUrl, Progress};
 
 #[derive(PartialEq, Clone, Debug)]
 pub struct GitRevision(git2::Oid);
 
-impl fmt::Display for GitRevision {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.0, f)
+impl ser::Serialize for GitRevision {
+    fn serialize<S: ser::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
+        serialize_str(self, s)
     }
 }
 
-/// GitRemote represents a remote repository. It gets cloned into a local
-/// GitDatabase.
-#[derive(PartialEq,Clone,Debug)]
-pub struct GitRemote {
-    url: Url,
+fn serialize_str<T, S>(t: &T, s: S) -> Result<S::Ok, S::Error>
+where
+    T: fmt::Display,
+    S: ser::Serializer,
+{
+    s.collect_str(t)
 }
 
-#[derive(PartialEq,Clone,RustcEncodable)]
-struct EncodableGitRemote {
-    url: String,
+impl fmt::Display for GitRevision {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.0, f)
+    }
 }
 
-impl Encodable for GitRemote {
-    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        EncodableGitRemote {
-            url: self.url.to_string()
-        }.encode(s)
+pub struct GitShortID(git2::Buf);
+
+impl GitShortID {
+    pub fn as_str(&self) -> &str {
+        self.0.as_str().unwrap()
+    }
 }
 
-/// GitDatabase is a local clone of a remote repository's database. Multiple
-/// GitCheckouts can be cloned from this GitDatabase.
+/// `GitRemote` represents a remote repository. It gets cloned into a local
+/// `GitDatabase`.
+#[derive(PartialEq, Clone, Debug, Serialize)]
+pub struct GitRemote {
+    #[serde(serialize_with = "serialize_str")]
+    url: Url,
+}
+
+/// `GitDatabase` is a local clone of a remote repository's database. Multiple
+/// `GitCheckouts` can be cloned from this `GitDatabase`.
+#[derive(Serialize)]
 pub struct GitDatabase {
     remote: GitRemote,
     path: PathBuf,
+    #[serde(skip_serializing)]
     repo: git2::Repository,
 }
 
-#[derive(RustcEncodable)]
-pub struct EncodableGitDatabase {
-    remote: GitRemote,
-    path: String,
-}
-
-impl Encodable for GitDatabase {
-    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        EncodableGitDatabase {
-            remote: self.remote.clone(),
-            path: self.path.display().to_string()
-        }.encode(s)
-    }
-}
-
-/// GitCheckout is a local checkout of a particular revision. Calling
+/// `GitCheckout` is a local checkout of a particular revision. Calling
 /// `clone_into` with a reference will resolve the reference into a revision,
-/// and return a CargoError if no revision for that reference was found.
+/// and return a `failure::Error` if no revision for that reference was found.
+#[derive(Serialize)]
 pub struct GitCheckout<'a> {
     database: &'a GitDatabase,
     location: PathBuf,
     revision: GitRevision,
+    #[serde(skip_serializing)]
     repo: git2::Repository,
 }
 
-#[derive(RustcEncodable)]
-pub struct EncodableGitCheckout {
-    database: EncodableGitDatabase,
-    location: String,
-    revision: String,
-}
-
-impl<'a> Encodable for GitCheckout<'a> {
-    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        EncodableGitCheckout {
-            location: self.location.display().to_string(),
-            revision: self.revision.to_string(),
-            database: EncodableGitDatabase {
-                remote: self.database.remote.clone(),
-                path: self.database.path.display().to_string(),
-            },
-        }.encode(s)
-    }
-}
-
 // Implementations
 
 impl GitRemote {
@@ -102,321 +90,846 @@ impl GitRemote {
         &self.url
     }
 
-    pub fn rev_for(&self, path: &Path, reference: &GitReference)
-                   -> CargoResult<GitRevision> {
-        let db = try!(self.db_at(path));
-        db.rev_for(reference)
+    pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult<GitRevision> {
+        reference.resolve(&self.db_at(path)?.repo)
     }
 
-    pub fn checkout(&self, into: &Path) -> CargoResult<GitDatabase> {
-        let repo = match git2::Repository::open(into) {
-            Ok(repo) => {
-                try!(self.fetch_into(&repo).chain_error(|| {
-                    human(format!("failed to fetch into {}", into.display()))
-                }));
-                repo
+    pub fn checkout(
+        &self,
+        into: &Path,
+        reference: &GitReference,
+        cargo_config: &Config,
+    ) -> CargoResult<(GitDatabase, GitRevision)> {
+        let mut repo_and_rev = None;
+        if let Ok(mut repo) = git2::Repository::open(into) {
+            self.fetch_into(&mut repo, cargo_config)
+                .chain_err(|| format!("failed to fetch into {}", into.display()))?;
+            if let Ok(rev) = reference.resolve(&repo) {
+                repo_and_rev = Some((repo, rev));
             }
-            Err(..) => {
-                try!(self.clone_into(into).chain_error(|| {
-                    human(format!("failed to clone into: {}", into.display()))
-                }))
+        }
+        let (repo, rev) = match repo_and_rev {
+            Some(pair) => pair,
+            None => {
+                let repo = self
+                    .clone_into(into, cargo_config)
+                    .chain_err(|| format!("failed to clone into: {}", into.display()))?;
+                let rev = reference.resolve(&repo)?;
+                (repo, rev)
             }
         };
 
-        Ok(GitDatabase {
-            remote: self.clone(),
-            path: into.to_path_buf(),
-            repo: repo,
-        })
+        Ok((
+            GitDatabase {
+                remote: self.clone(),
+                path: into.to_path_buf(),
+                repo,
+            },
+            rev,
+        ))
     }
 
     pub fn db_at(&self, db_path: &Path) -> CargoResult<GitDatabase> {
-        let repo = try!(git2::Repository::open(db_path));
+        let repo = git2::Repository::open(db_path)?;
         Ok(GitDatabase {
             remote: self.clone(),
             path: db_path.to_path_buf(),
-            repo: repo,
+            repo,
         })
     }
 
-    fn fetch_into(&self, dst: &git2::Repository) -> CargoResult<()> {
+    fn fetch_into(&self, dst: &mut git2::Repository, cargo_config: &Config) -> CargoResult<()> {
         // Create a local anonymous remote in the repository to fetch the url
-        let url = self.url.to_string();
         let refspec = "refs/heads/*:refs/heads/*";
-        fetch(dst, &url, refspec)
+        fetch(dst, &self.url, refspec, cargo_config)
     }
 
-    fn clone_into(&self, dst: &Path) -> CargoResult<git2::Repository> {
-        let url = self.url.to_string();
+    fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult<git2::Repository> {
         if fs::metadata(&dst).is_ok() {
-            try!(fs::remove_dir_all(dst));
+            paths::remove_dir_all(dst)?;
         }
-        try!(fs::create_dir_all(dst));
-        let repo = try!(git2::Repository::init_bare(dst));
-        try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*"));
+        fs::create_dir_all(dst)?;
+        let mut repo = init(dst, true)?;
+        fetch(
+            &mut repo,
+            &self.url,
+            "refs/heads/*:refs/heads/*",
+            cargo_config,
+        )?;
         Ok(repo)
     }
 }
 
 impl GitDatabase {
-    fn path<'a>(&'a self) -> &'a Path {
-        &self.path
-    }
-
-    pub fn copy_to(&self, rev: GitRevision, dest: &Path)
-                   -> CargoResult<GitCheckout> {
-        let checkout = match git2::Repository::open(dest) {
-            Ok(repo) => {
-                let checkout = GitCheckout::new(dest, self, rev, repo);
-                if !checkout.is_fresh() {
-                    try!(checkout.fetch());
-                    try!(checkout.reset());
-                    assert!(checkout.is_fresh());
+    pub fn copy_to(
+        &self,
+        rev: GitRevision,
+        dest: &Path,
+        cargo_config: &Config,
+    ) -> CargoResult<GitCheckout<'_>> {
+        let mut checkout = None;
+        if let Ok(repo) = git2::Repository::open(dest) {
+            let mut co = GitCheckout::new(dest, self, rev.clone(), repo);
+            if !co.is_fresh() {
+                // After a successful fetch operation do a sanity check to
+                // ensure we've got the object in our database to reset to. This
+                // can fail sometimes for corrupt repositories where the fetch
+                // operation succeeds but the object isn't actually there.
+                co.fetch(cargo_config)?;
+                if co.has_object() {
+                    co.reset(cargo_config)?;
+                    assert!(co.is_fresh());
+                    checkout = Some(co);
                 }
-                checkout
+            } else {
+                checkout = Some(co);
             }
-            Err(..) => try!(GitCheckout::clone_into(dest, self, rev)),
         };
-        try!(checkout.update_submodules().chain_error(|| {
-            internal("failed to update submodules")
-        }));
+        let checkout = match checkout {
+            Some(c) => c,
+            None => GitCheckout::clone_into(dest, self, rev, cargo_config)?,
+        };
+        checkout.update_submodules(cargo_config)?;
         Ok(checkout)
     }
 
-    pub fn rev_for(&self, reference: &GitReference) -> CargoResult<GitRevision> {
-        let id = match *reference {
-            GitReference::Tag(ref s) => {
-                try!((|| {
-                    let refname = format!("refs/tags/{}", s);
-                    let id = try!(self.repo.refname_to_id(&refname));
-                    let obj = try!(self.repo.find_object(id, None));
-                    let obj = try!(obj.peel(ObjectType::Commit));
-                    Ok(obj.id())
-                }).chain_error(|| {
-                    human(format!("failed to find tag `{}`", s))
-                }))
-            }
+    pub fn to_short_id(&self, revision: &GitRevision) -> CargoResult<GitShortID> {
+        let obj = self.repo.find_object(revision.0, None)?;
+        Ok(GitShortID(obj.short_id()?))
+    }
+
+    pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
+        self.repo.revparse_single(reference)?;
+        Ok(())
+    }
+}
+
+impl GitReference {
+    fn resolve(&self, repo: &git2::Repository) -> CargoResult<GitRevision> {
+        let id = match *self {
+            GitReference::Tag(ref s) => (|| -> CargoResult<git2::Oid> {
+                let refname = format!("refs/tags/{}", s);
+                let id = repo.refname_to_id(&refname)?;
+                let obj = repo.find_object(id, None)?;
+                let obj = obj.peel(ObjectType::Commit)?;
+                Ok(obj.id())
+            })()
+            .chain_err(|| format!("failed to find tag `{}`", s))?,
             GitReference::Branch(ref s) => {
-                try!((|| {
-                    let b = try!(self.repo.find_branch(s, git2::BranchType::Local));
-                    b.get().target().chain_error(|| {
-                        human(format!("branch `{}` did not have a target", s))
-                    })
-                }).chain_error(|| {
-                    human(format!("failed to find branch `{}`", s))
-                }))
+                let b = repo
+                    .find_branch(s, git2::BranchType::Local)
+                    .chain_err(|| format!("failed to find branch `{}`", s))?;
+                b.get()
+                    .target()
+                    .ok_or_else(|| failure::format_err!("branch `{}` did not have a target", s))?
             }
             GitReference::Rev(ref s) => {
-                let obj = try!(self.repo.revparse_single(s));
-                obj.id()
+                let obj = repo.revparse_single(s)?;
+                match obj.as_tag() {
+                    Some(tag) => tag.target_id(),
+                    None => obj.id(),
+                }
             }
         };
         Ok(GitRevision(id))
     }
-
-    pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
-        try!(self.repo.revparse_single(reference));
-        Ok(())
-    }
 }
 
 impl<'a> GitCheckout<'a> {
-    fn new(path: &Path, database: &'a GitDatabase, revision: GitRevision,
-           repo: git2::Repository)
-           -> GitCheckout<'a>
-    {
+    fn new(
+        path: &Path,
+        database: &'a GitDatabase,
+        revision: GitRevision,
+        repo: git2::Repository,
+    ) -> GitCheckout<'a> {
         GitCheckout {
             location: path.to_path_buf(),
-            database: database,
-            revision: revision,
-            repo: repo,
+            database,
+            revision,
+            repo,
         }
     }
 
-    fn clone_into(into: &Path, database: &'a GitDatabase,
-                  revision: GitRevision)
-                  -> CargoResult<GitCheckout<'a>>
-    {
-        let repo = try!(GitCheckout::clone_repo(database.path(), into));
-        let checkout = GitCheckout::new(into, database, revision, repo);
-        try!(checkout.reset());
-        Ok(checkout)
-    }
-
-    fn clone_repo(source: &Path, into: &Path) -> CargoResult<git2::Repository> {
+    fn clone_into(
+        into: &Path,
+        database: &'a GitDatabase,
+        revision: GitRevision,
+        config: &Config,
+    ) -> CargoResult<GitCheckout<'a>> {
         let dirname = into.parent().unwrap();
-
-        try!(fs::create_dir_all(&dirname).chain_error(|| {
-            human(format!("Couldn't mkdir {}", dirname.display()))
-        }));
-
-        if fs::metadata(&into).is_ok() {
-            try!(fs::remove_dir_all(into).chain_error(|| {
-                human(format!("Couldn't rmdir {}", into.display()))
-            }));
+        fs::create_dir_all(&dirname)
+            .chain_err(|| format!("Couldn't mkdir {}", dirname.display()))?;
+        if into.exists() {
+            paths::remove_dir_all(into)?;
         }
 
-        let url = try!(source.to_url().map_err(human));
-        let url = url.to_string();
-        let repo = try!(git2::Repository::clone(&url, into).chain_error(|| {
-            internal(format!("failed to clone {} into {}", source.display(),
-                             into.display()))
-        }));
-        Ok(repo)
+        // we're doing a local filesystem-to-filesystem clone so there should
+        // be no need to respect global configuration options, so pass in
+        // an empty instance of `git2::Config` below.
+        let git_config = git2::Config::new()?;
+
+        // Clone the repository, but make sure we use the "local" option in
+        // libgit2 which will attempt to use hardlinks to set up the database.
+        // This should speed up the clone operation quite a bit if it works.
+        //
+        // Note that we still use the same fetch options because while we don't
+        // need authentication information we may want progress bars and such.
+        let url = database.path.into_url()?;
+        let mut repo = None;
+        with_fetch_options(&git_config, &url, config, &mut |fopts| {
+            let mut checkout = git2::build::CheckoutBuilder::new();
+            checkout.dry_run(); // we'll do this below during a `reset`
+
+            let r = git2::build::RepoBuilder::new()
+                // use hard links and/or copy the database, we're doing a
+                // filesystem clone so this'll speed things up quite a bit.
+                .clone_local(git2::build::CloneLocal::Local)
+                .with_checkout(checkout)
+                .fetch_options(fopts)
+                // .remote_create(|repo, _name, url| repo.remote_anonymous(url))
+                .clone(url.as_str(), into)?;
+            repo = Some(r);
+            Ok(())
+        })?;
+        let repo = repo.unwrap();
+
+        let checkout = GitCheckout::new(into, database, revision, repo);
+        checkout.reset(config)?;
+        Ok(checkout)
     }
 
     fn is_fresh(&self) -> bool {
         match self.repo.revparse_single("HEAD") {
-            Ok(head) => head.id().to_string() == self.revision.to_string(),
+            Ok(ref head) if head.id() == self.revision.0 => {
+                // See comments in reset() for why we check this
+                self.location.join(".cargo-ok").exists()
+            }
             _ => false,
         }
     }
 
-    fn fetch(&self) -> CargoResult<()> {
+    fn fetch(&mut self, cargo_config: &Config) -> CargoResult<()> {
         info!("fetch {}", self.repo.path().display());
-        let url = try!(self.database.path.to_url().map_err(human));
-        let url = url.to_string();
+        let url = self.database.path.into_url()?;
         let refspec = "refs/heads/*:refs/heads/*";
-        try!(fetch(&self.repo, &url, refspec));
+        fetch(&mut self.repo, &url, refspec, cargo_config)?;
         Ok(())
     }
 
-    fn reset(&self) -> CargoResult<()> {
+    fn has_object(&self) -> bool {
+        self.repo.find_object(self.revision.0, None).is_ok()
+    }
+
+    fn reset(&self, config: &Config) -> CargoResult<()> {
+        // If we're interrupted while performing this reset (e.g., we die because
+        // of a signal) Cargo needs to be sure to try to check out this repo
+        // again on the next go-round.
+        //
+        // To enable this we have a dummy file in our checkout, .cargo-ok, which
+        // if present means that the repo has been successfully reset and is
+        // ready to go. Hence if we start to do a reset, we make sure this file
+        // *doesn't* exist, and then once we're done we create the file.
+        let ok_file = self.location.join(".cargo-ok");
+        let _ = paths::remove_file(&ok_file);
         info!("reset {} to {}", self.repo.path().display(), self.revision);
-        let object = try!(self.repo.find_object(self.revision.0, None));
-        try!(self.repo.reset(&object, git2::ResetType::Hard, None));
+        let object = self.repo.find_object(self.revision.0, None)?;
+        reset(&self.repo, &object, config)?;
+        File::create(ok_file)?;
         Ok(())
     }
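The `.cargo-ok` dance generalizes to any interruptible mutation: remove the marker, mutate, then recreate it, so a killed process leaves no marker and the work is redone next time. A minimal std-only sketch with an invented helper name:

```rust
use std::fs::{self, File};
use std::io;
use std::path::Path;

// Run `mutate` under a sentinel file: if we're interrupted before the
// sentinel is recreated, a later `is_fresh`-style check fails and the
// whole operation is retried.
fn with_sentinel(dir: &Path, mutate: impl FnOnce() -> io::Result<()>) -> io::Result<()> {
    let ok_file = dir.join(".cargo-ok");
    let _ = fs::remove_file(&ok_file); // may not exist yet; ignore the error
    mutate()?; // a crash here leaves no sentinel behind
    File::create(ok_file)?;
    Ok(())
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir();
    with_sentinel(&dir, || Ok(()))?;
    println!("sentinel present: {}", dir.join(".cargo-ok").exists());
    Ok(())
}
```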
 
-    fn update_submodules(&self) -> CargoResult<()> {
-        return update_submodules(&self.repo);
+    fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> {
+        return update_submodules(&self.repo, cargo_config);
 
-        fn update_submodules(repo: &git2::Repository) -> CargoResult<()> {
+        fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> {
             info!("update submodules for: {:?}", repo.workdir().unwrap());
 
-            for mut child in try!(repo.submodules()).into_iter() {
-                try!(child.init(false));
-                let url = try!(child.url().chain_error(|| {
-                    internal("non-utf8 url for submodule")
-                }));
-
-                // A submodule which is listed in .gitmodules but not actually
-                // checked out will not have a head id, so we should ignore it.
-                let head = match child.head_id() {
-                    Some(head) => head,
-                    None => continue,
-                };
-
-                // If the submodule hasn't been checked out yet, we need to
-                // clone it. If it has been checked out and the head is the same
-                // as the submodule's head, then we can bail out and go to the
-                // next submodule.
-                let head_and_repo = child.open().and_then(|repo| {
-                    let target = try!(repo.head()).target();
-                    Ok((target, repo))
-                });
-                let repo = match head_and_repo {
-                    Ok((head, repo)) => {
-                        if child.head_id() == head {
-                            continue
-                        }
-                        repo
-                    }
-                    Err(..) => {
-                        let path = repo.workdir().unwrap().join(child.path());
-                        try!(git2::Repository::clone(url, &path))
-                    }
-                };
-
-                // Fetch data from origin and reset to the head commit
-                let refspec = "refs/heads/*:refs/heads/*";
-                try!(fetch(&repo, url, refspec).chain_error(|| {
-                    internal(format!("failed to fetch submodule `{}` from {}",
-                                     child.name().unwrap_or(""), url))
-                }));
-
-                let obj = try!(repo.find_object(head, None));
-                try!(repo.reset(&obj, git2::ResetType::Hard, None));
-                try!(update_submodules(&repo));
+            for mut child in repo.submodules()? {
+                update_submodule(repo, &mut child, cargo_config).chain_err(|| {
+                    format!(
+                        "failed to update submodule `{}`",
+                        child.name().unwrap_or("")
+                    )
+                })?;
             }
             Ok(())
         }
+
+        fn update_submodule(
+            parent: &git2::Repository,
+            child: &mut git2::Submodule<'_>,
+            cargo_config: &Config,
+        ) -> CargoResult<()> {
+            child.init(false)?;
+            let url = child
+                .url()
+                .ok_or_else(|| internal("non-utf8 url for submodule"))?;
+
+            // A submodule which is listed in .gitmodules but not actually
+            // checked out will not have a head id, so we should ignore it.
+            let head = match child.head_id() {
+                Some(head) => head,
+                None => return Ok(()),
+            };
+
+            // If the submodule hasn't been checked out yet, we need to
+            // clone it. If it has been checked out and the head is the same
+            // as the submodule's head, then we can skip an update and keep
+            // recursing.
+            let head_and_repo = child.open().and_then(|repo| {
+                let target = repo.head()?.target();
+                Ok((target, repo))
+            });
+            let mut repo = match head_and_repo {
+                Ok((head, repo)) => {
+                    if child.head_id() == head {
+                        return update_submodules(&repo, cargo_config);
+                    }
+                    repo
+                }
+                Err(..) => {
+                    let path = parent.workdir().unwrap().join(child.path());
+                    let _ = paths::remove_dir_all(&path);
+                    init(&path, false)?
+                }
+            };
+
+            // Fetch data from origin and reset to the head commit
+            let refspec = "refs/heads/*:refs/heads/*";
+            let url = url.into_url()?;
+            fetch(&mut repo, &url, refspec, cargo_config).chain_err(|| {
+                internal(format!(
+                    "failed to fetch submodule `{}` from {}",
+                    child.name().unwrap_or(""),
+                    url
+                ))
+            })?;
+
+            let obj = repo.find_object(head, None)?;
+            reset(&repo, &obj, cargo_config)?;
+            update_submodules(&repo, cargo_config)
+        }
    }
 }
 
-fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F)
-                             -> CargoResult<T>
-    where F: FnMut(&mut git2::Credentials) -> CargoResult<T>
+/// Prepare the authentication callbacks for cloning a git repository.
+///
+/// The main purpose of this function is to construct the "authentication
+/// callback" which is used to clone a repository. This callback will attempt to
+/// find the right authentication on the system (without user input) and will
+/// guide libgit2 in doing so.
+///
+/// The callback is provided `allowed` types of credentials, and we try to do as
+/// much as possible based on that:
+///
+/// * Prioritize SSH keys from the local ssh agent as they're likely the most
+///   reliable. The username here is prioritized from the credential
+///   callback, then from whatever is configured in git itself, and finally
+///   we fall back to the generic user of `git`.
+///
+/// * If a username/password is allowed, then we fallback to git2-rs's
+///   implementation of the credential helper. This is what is configured
+///   with `credential.helper` in git, and is the interface for the macOS
+///   keychain, for example.
+///
+/// * After the above two have failed, we just kinda grapple attempting to
+///   return *something*.
+///
+/// If any form of authentication fails, libgit2 will repeatedly ask us for
+/// credentials until we give it a reason to not do so. To ensure we don't
+/// just sit here looping forever we keep track of authentications we've
+/// attempted and we don't try the same ones again.
+fn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult<T>
+where
+    F: FnMut(&mut git2::Credentials<'_>) -> CargoResult<T>,
 {
-    // Prepare the authentication callbacks.
-    //
-    // We check the `allowed` types of credentials, and we try to do as much as
-    // possible based on that:
-    //
-    // * Prioritize SSH keys from the local ssh agent as they're likely the most
-    //   reliable. The username here is prioritized from the credential
-    //   callback, then from whatever is configured in git itself, and finally
-    //   we fall back to the generic user of `git`.
-    //
-    // * If a username/password is allowed, then we fallback to git2-rs's
-    //   implementation of the credential helper. This is what is configured
-    //   with `credential.helper` in git, and is the interface for the OSX
-    //   keychain, for example.
-    //
-    // * After the above two have failed, we just kinda grapple attempting to
-    //   return *something*.
-    //
-    // Note that we keep track of the number of times we've called this callback
-    // because libgit2 will repeatedly give us credentials until we give it a
-    // reason to not do so. If we've been called once and our credentials failed
-    // then we'll be called again, and in this case we assume that the reason
-    // was because the credentials were wrong.
     let mut cred_helper = git2::CredentialHelper::new(url);
     cred_helper.config(cfg);
-    let mut called = 0;
-    let res = f(&mut |url, username, allowed| {
-        called += 1;
-        if called >= 2 {
-            return Err(git2::Error::from_str("no authentication available"))
+
+    let mut ssh_username_requested = false;
+    let mut cred_helper_bad = None;
+    let mut ssh_agent_attempts = Vec::new();
+    let mut any_attempts = false;
+    let mut tried_sshkey = false;
+
+    let mut res = f(&mut |url, username, allowed| {
+        any_attempts = true;
+        // libgit2's "USERNAME" authentication actually means that it's just
+        // asking us for a username to keep going. This is currently only really
+        // used for SSH authentication and isn't really an authentication type.
+        // The logic currently looks like:
+        //
+        //      let user = ...;
+        //      if (user.is_null())
+        //          user = callback(USERNAME, null, ...);
+        //
+        //      callback(SSH_KEY, user, ...)
+        //
+        // So if we're being called here then we know that (a) we're using ssh
+        // authentication and (b) no username was specified in the URL that
+        // we're trying to clone. We need to guess an appropriate username here,
+        // but that may involve a few attempts. Unfortunately we can't switch
+        // usernames during one authentication session with libgit2, so to
+        // handle this we bail out of this authentication session after setting
+        // the flag `ssh_username_requested`, and then we handle this below.
+        if allowed.contains(git2::CredentialType::USERNAME) {
+            debug_assert!(username.is_none());
+            ssh_username_requested = true;
+            return Err(git2::Error::from_str("gonna try usernames later"));
+        }
+
+        // An "SSH_KEY" authentication indicates that we need some sort of SSH
+        // authentication. This can currently either come from the ssh-agent
+        // process or from a raw in-memory SSH key. Cargo only supports using
+        // ssh-agent currently.
+        //
+        // If we get called with this then the only way that should be possible
+        // is if a username is specified in the URL itself (e.g., `username` is
+        // Some), hence the unwrap() here. We try custom usernames down below.
+        if allowed.contains(git2::CredentialType::SSH_KEY) && !tried_sshkey {
+            // If ssh-agent authentication fails, libgit2 will keep
+            // calling this callback asking for other authentication
+            // methods to try. Make sure we only try ssh-agent once,
+            // to avoid looping forever.
+            tried_sshkey = true;
+            let username = username.unwrap();
+            debug_assert!(!ssh_username_requested);
+            ssh_agent_attempts.push(username.to_string());
+            return git2::Cred::ssh_key_from_agent(username);
+        }
+
+        // Sometimes libgit2 will ask for a username/password in plaintext. This
+        // is where Cargo would have an interactive prompt if we supported it,
+        // but we currently don't! Right now the only way we support fetching a
+        // plaintext password is through the `credential.helper` support, so
+        // fetch that here.
+        //
+        // If ssh-agent authentication fails, libgit2 will keep calling this
+        // callback asking for other authentication methods to try. Check
+        // cred_helper_bad to make sure we only try the git credential helper
+        // once, to avoid looping forever.
+        if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) && cred_helper_bad.is_none()
+        {
+            let r = git2::Cred::credential_helper(cfg, url, username);
+            cred_helper_bad = Some(r.is_err());
+            return r;
         }
-        if allowed.contains(git2::SSH_KEY) ||
-           allowed.contains(git2::USERNAME) {
-            let user = username.map(|s| s.to_string())
-                               .or_else(|| cred_helper.username.clone())
-                               .unwrap_or("git".to_string());
-            if allowed.contains(git2::USERNAME) {
-                git2::Cred::username(&user)
+
+        // I'm... not sure what the DEFAULT kind of authentication is, but seems
+        // easy to support?
+        if allowed.contains(git2::CredentialType::DEFAULT) {
+            return git2::Cred::default();
+        }
+
+        // Whelp, we tried our best
+        Err(git2::Error::from_str("no authentication available"))
+    });
+
+    // Ok, so if it looks like we're going to be doing ssh authentication, we
+    // want to try a few different usernames as one wasn't specified in the URL
+    // for us to use. In order, we'll try:
+    //
+    // * A credential helper's username for this URL, if available.
+    // * This account's username.
+    // * "git"
+    //
+    // We have to restart the authentication session each time (due to
+    // constraints in libssh2 I guess? maybe this is inherent to ssh?), so we
+    // call our callback, `f`, in a loop here.
+    if ssh_username_requested {
+        debug_assert!(res.is_err());
+        let mut attempts = Vec::new();
+        attempts.push("git".to_string());
+        if let Ok(s) = env::var("USER").or_else(|_| env::var("USERNAME")) {
+            attempts.push(s);
+        }
+        if let Some(ref s) = cred_helper.username {
+            attempts.push(s.clone());
+        }
+
+        while let Some(s) = attempts.pop() {
+            // We should get `USERNAME` first, where we just return our attempt,
+            // and then after that we should get `SSH_KEY`. If the first attempt
+            // fails we'll get called again, but we don't have another option so
+            // we bail out.
+            let mut attempts = 0;
+            res = f(&mut |_url, username, allowed| {
+                if allowed.contains(git2::CredentialType::USERNAME) {
+                    return git2::Cred::username(&s);
+                }
+                if allowed.contains(git2::CredentialType::SSH_KEY) {
+                    debug_assert_eq!(Some(&s[..]), username);
+                    attempts += 1;
+                    if attempts == 1 {
+                        ssh_agent_attempts.push(s.to_string());
+                        return git2::Cred::ssh_key_from_agent(&s);
+                    }
+                }
+                Err(git2::Error::from_str("no authentication available"))
+            });
+
+            // If we made two attempts then that means:
+            //
+            // 1. A username was requested, we returned `s`.
+            // 2. An ssh key was requested, we returned to look up `s` in the
+            //    ssh agent.
+            // 3. For whatever reason that lookup failed, so we were asked again
+            //    for another mode of authentication.
+            //
+            // Essentially, if `attempts == 2` then in theory the only error was
+            // that this username failed to authenticate (e.g., no other network
+            // errors happened). Otherwise something else is funny so we bail
+            // out.
+            if attempts != 2 {
+                break;
+            }
+        }
+    }
+
+    if res.is_ok() || !any_attempts {
+        return res.map_err(From::from);
+    }
+
+    // In the case of an authentication failure (where we tried something) then
+    // we try to give a more helpful error message about precisely what we
+    // tried.
+    let res = res.map_err(failure::Error::from).chain_err(|| {
+        let mut msg = "failed to authenticate when downloading \
+                       repository"
+            .to_string();
+        if !ssh_agent_attempts.is_empty() {
+            let names = ssh_agent_attempts
+                .iter()
+                .map(|s| format!("`{}`", s))
+                .collect::<Vec<_>>()
+                .join(", ");
+            msg.push_str(&format!(
+                "\nattempted ssh-agent authentication, but \
+                 none of the usernames {} succeeded",
+                names
+            ));
+        }
+        if let Some(failed_cred_helper) = cred_helper_bad {
+            if failed_cred_helper {
+                msg.push_str(
+                    "\nattempted to find username/password via \
+                     git's `credential.helper` support, but failed",
+                );
             } else {
-                git2::Cred::ssh_key_from_agent(&user)
+                msg.push_str(
+                    "\nattempted to find username/password via \
+                     `credential.helper`, but maybe the found \
+                     credentials were incorrect",
+                );
             }
-        } else if allowed.contains(git2::USER_PASS_PLAINTEXT) {
-            git2::Cred::credential_helper(cfg, url, username)
-        } else if allowed.contains(git2::DEFAULT) {
-            git2::Cred::default()
-        } else {
-            Err(git2::Error::from_str("no authentication available"))
         }
+        msg
+    })?;
+    Ok(res)
+}
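The retry loop above pops candidates off the end of a vector, so the effective priority is the reverse of the push order. A sketch of just that candidate list, with an invented helper name:

```rust
use std::env;

// Candidates are pushed lowest-priority first; popping from the end
// then tries the credential helper's username, then $USER/$USERNAME,
// then plain "git", mirroring the loop above.
fn username_candidates(cred_helper_username: Option<&str>) -> Vec<String> {
    let mut attempts = vec!["git".to_string()];
    if let Ok(s) = env::var("USER").or_else(|_| env::var("USERNAME")) {
        attempts.push(s);
    }
    if let Some(s) = cred_helper_username {
        attempts.push(s.to_string());
    }
    attempts
}

fn main() {
    let mut attempts = username_candidates(Some("alice"));
    while let Some(user) = attempts.pop() {
        println!("would try ssh-agent auth as `{}`", user);
    }
}
```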
 
+fn reset(repo: &git2::Repository, obj: &git2::Object<'_>, config: &Config) -> CargoResult<()> {
+    let mut pb = Progress::new("Checkout", config);
+    let mut opts = git2::build::CheckoutBuilder::new();
+    opts.progress(|_, cur, max| {
+        drop(pb.tick(cur, max));
     });
-    if called > 0 {
-        res.chain_error(|| {
-            human("failed to authenticate when downloading repository")
-        })
-    } else {
-        res
-    }
-}
+    repo.reset(obj, git2::ResetType::Hard, Some(&mut opts))?;
+    Ok(())
+}
 
-pub fn fetch(repo: &git2::Repository, url: &str,
-             refspec: &str) -> CargoResult<()> {
-    // Create a local anonymous remote in the repository to fetch the url
-
-    with_authentication(url, &try!(repo.config()), |f| {
-        let mut cb = git2::RemoteCallbacks::new();
-        cb.credentials(f);
-        let mut remote = try!(repo.remote_anonymous(&url, Some(refspec)));
-        try!(remote.add_fetch("refs/tags/*:refs/tags/*"));
-        remote.set_callbacks(cb);
-        try!(remote.fetch(&["refs/tags/*:refs/tags/*", refspec], None));
-        Ok(())
-    })
-}
+pub fn with_fetch_options(
+    git_config: &git2::Config,
+    url: &Url,
+    config: &Config,
+    cb: &mut dyn FnMut(git2::FetchOptions<'_>) -> CargoResult<()>,
+) -> CargoResult<()> {
+    let mut progress = Progress::new("Fetch", config);
+    network::with_retry(config, || {
+        with_authentication(url.as_str(), git_config, |f| {
+            let mut rcb = git2::RemoteCallbacks::new();
+            rcb.credentials(f);
+
+            rcb.transfer_progress(|stats| {
+                progress
+                    .tick(stats.indexed_objects(), stats.total_objects())
+                    .is_ok()
+            });
+
+            // Create a local anonymous remote in the repository to fetch the
+            // url
+            let mut opts = git2::FetchOptions::new();
+            opts.remote_callbacks(rcb)
+                .download_tags(git2::AutotagOption::All);
+            cb(opts)
+        })?;
+        Ok(())
+    })
+}
+
+pub fn fetch(
+    repo: &mut git2::Repository,
+    url: &Url,
+    refspec: &str,
+    config: &Config,
+) -> CargoResult<()> {
+    if config.frozen() {
+        failure::bail!(
+            "attempting to update a git repository, but --frozen \
+             was specified"
+        )
+    }
+    if !config.network_allowed() {
+        failure::bail!("can't update a git repository in the offline mode")
+    }
+
+    // If we're fetching from GitHub, attempt GitHub's special fast path for
+    // testing if we've already got an up-to-date copy of the repository
+    if url.host_str() == Some("github.com") {
+        if let Ok(oid) = repo.refname_to_id("refs/remotes/origin/master") {
+            let mut handle = config.http()?.borrow_mut();
+            debug!("attempting GitHub fast path for {}", url);
+            if github_up_to_date(&mut handle, url, &oid) {
+                return Ok(());
+            } else {
+                debug!("fast path failed, falling back to a git fetch");
+            }
+        }
+    }
+
+    // We reuse repositories quite a lot, so before we go through and update the
+    // repo check to see if it's a little too old and could benefit from a gc.
+    // In theory this shouldn't be too too expensive compared to the network
+    // request we're about to issue.
+    maybe_gc_repo(repo)?;
+
+    // Unfortunately `libgit2` is notably lacking in the realm of authentication
+    // when compared to the `git` command line. As a result, allow an escape
+    // hatch for users that would prefer to use `git`-the-CLI for fetching
+    // repositories instead of `libgit2`-the-library. This should make more
+    // flavors of authentication possible while also still giving us all the
+    // speed and portability of using `libgit2`.
+    if let Some(val) = config.get_bool("net.git-fetch-with-cli")? {
+        if val.val {
+            return fetch_with_cli(repo, url, refspec, config);
+        }
+    }
+
+    debug!("doing a fetch for {}", url);
+    let git_config = git2::Config::open_default()?;
+    with_fetch_options(&git_config, url, config, &mut |mut opts| {
+        // The `fetch` operation here may fail spuriously due to a corrupt
+        // repository. It could also fail, however, for a whole slew of other
+        // reasons (aka network related reasons). We want Cargo to automatically
+        // recover from corrupt repositories, but we don't want Cargo to stomp
+        // over other legitimate errors.
+        //
+        // Consequently we save off the error of the `fetch` operation and if it
+        // looks like a "corrupt repo" error then we blow away the repo and try
+        // again. If it looks like any other kind of error, or if we've already
+        // blown away the repository, then we want to return the error as-is.
+        let mut repo_reinitialized = false;
+        loop {
+            debug!("initiating fetch of {} from {}", refspec, url);
+            let res = repo
+                .remote_anonymous(url.as_str())?
+                .fetch(&[refspec], Some(&mut opts), None);
+            let err = match res {
+                Ok(()) => break,
+                Err(e) => e,
+            };
+            debug!("fetch failed: {}", err);
+
+            if !repo_reinitialized && err.class() == git2::ErrorClass::Reference {
+                repo_reinitialized = true;
+                debug!(
+                    "looks like this is a corrupt repository, reinitializing \
+                     and trying again"
+                );
+                if reinitialize(repo).is_ok() {
+                    continue;
+                }
+            }
+
+            return Err(err.into());
+        }
+        Ok(())
+    })
+}
+
+fn fetch_with_cli(
+    repo: &mut git2::Repository,
+    url: &Url,
+    refspec: &str,
+    config: &Config,
+) -> CargoResult<()> {
+    let mut cmd = process("git");
+    cmd.arg("fetch")
+        .arg("--tags") // fetch all tags
+        .arg("--force") // handle force pushes
+        .arg("--update-head-ok") // see discussion in #2078
+        .arg(url.to_string())
+        .arg(refspec)
+        // If cargo is run by git (for example, the `exec` command in `git
+        // rebase`), the GIT_DIR is set by git and will point to the wrong
+        // location (this takes precedence over the cwd). Make sure this is
+        // unset so git will look at cwd for the repo.
+        .env_remove("GIT_DIR")
+        // The rest of these may not be necessary, but I'm including them
+        // just to be extra paranoid and avoid any issues.
+        .env_remove("GIT_WORK_TREE")
+        .env_remove("GIT_INDEX_FILE")
+        .env_remove("GIT_OBJECT_DIRECTORY")
+        .env_remove("GIT_ALTERNATE_OBJECT_DIRECTORIES")
+        .cwd(repo.path());
+    config
+        .shell()
+        .verbose(|s| s.status("Running", &cmd.to_string()))?;
+    cmd.exec_with_output()?;
+    Ok(())
+}
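Roughly what the escape hatch shells out to when `net.git-fetch-with-cli = true`; a std-only sketch with an invented remote URL, run from a temporary directory standing in for the bare database path:

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    let status = Command::new("git")
        .args(&["fetch", "--tags", "--force", "--update-head-ok"])
        .arg("https://github.com/rust-lang/cargo") // hypothetical remote
        .arg("refs/heads/*:refs/heads/*")
        // Don't let an outer `git rebase --exec cargo ...` leak its
        // GIT_DIR into our fetch; the cwd should locate the repo instead.
        .env_remove("GIT_DIR")
        .current_dir(std::env::temp_dir()) // stands in for the bare db path
        .status()?;
    println!("git fetch exited with {}", status);
    Ok(())
}
```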
+
+/// Cargo has a bunch of long-lived git repositories in its global cache and
+/// some, like the index, are updated very frequently. Right now each update
+/// creates a new "pack file" inside the git database, and over time this can
+/// cause bad performance and bad current behavior in libgit2.
+///
+/// One pathological use case today is where libgit2 opens hundreds of file
+/// descriptors, getting us dangerously close to blowing out the OS limits of
+/// how many fds we can have open. This is detailed in #4403.
+///
+/// To try to combat this problem we attempt a `git gc` here. Note, though, that
+/// we may not even have `git` installed on the system! As a result we
+/// opportunistically try a `git gc` when the pack directory looks too big, and
+/// failing that we just blow away the repository and start over.
+fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> {
+    // Here we arbitrarily declare that if you have more than 100 files in your
+    // `pack` folder that we need to do a gc.
+    let entries = match repo.path().join("objects/pack").read_dir() {
+        Ok(e) => e.count(),
+        Err(_) => {
+            debug!("skipping gc as pack dir appears gone");
+            return Ok(());
+        }
+    };
+    let max = env::var("__CARGO_PACKFILE_LIMIT")
+        .ok()
+        .and_then(|s| s.parse::<usize>().ok())
+        .unwrap_or(100);
+    if entries < max {
+        debug!("skipping gc as there's only {} pack files", entries);
+        return Ok(());
+    }
+
+    // First up, try a literal `git gc` by shelling out to git. This is pretty
+    // likely to fail though as we may not have `git` installed. Note that
+    // libgit2 doesn't currently implement the gc operation, so there's no
+    // equivalent there.
+    match Command::new("git")
+        .arg("gc")
+        .current_dir(repo.path())
+        .output()
+    {
+        Ok(out) => {
+            debug!(
+                "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}",
+                out.status,
+                String::from_utf8_lossy(&out.stdout),
+                String::from_utf8_lossy(&out.stderr)
+            );
+            if out.status.success() {
+                let new = git2::Repository::open(repo.path())?;
+                mem::replace(repo, new);
+                return Ok(());
+            }
+        }
+        Err(e) => debug!("git-gc failed to spawn: {}", e),
+    }
+
+    // Alright all else failed, let's start over.
+    reinitialize(repo)
+}
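The gc trigger reduces to a directory-entry count with an environment override; extracted as a std-only sketch (the helper name and path are invented):

```rust
use std::env;
use std::path::Path;

// `true` once objects/pack accumulates at least `__CARGO_PACKFILE_LIMIT`
// (default 100) pack files; a missing pack dir means nothing to gc.
fn needs_gc(git_dir: &Path) -> bool {
    let entries = match git_dir.join("objects/pack").read_dir() {
        Ok(e) => e.count(),
        Err(_) => return false,
    };
    let max = env::var("__CARGO_PACKFILE_LIMIT")
        .ok()
        .and_then(|s| s.parse::<usize>().ok())
        .unwrap_or(100);
    entries >= max
}

fn main() {
    println!("{}", needs_gc(Path::new("/tmp/cargo-git-db")));
}
```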
r#try { + ($e:expr) => { + match $e { + Some(e) => e, + None => return false, + } + }; + } + + // This expects GitHub urls in the form `github.com/user/repo` and nothing + // else + let mut pieces = r#try!(url.path_segments()); + let username = r#try!(pieces.next()); + let repo = r#try!(pieces.next()); + if pieces.next().is_some() { + return false; + } + + let url = format!( + "https://api.github.com/repos/{}/{}/commits/master", + username, repo + ); + r#try!(handle.get(true).ok()); + r#try!(handle.url(&url).ok()); + r#try!(handle.useragent("cargo").ok()); + let mut headers = List::new(); + r#try!(headers.append("Accept: application/vnd.github.3.sha").ok()); + r#try!(headers.append(&format!("If-None-Match: \"{}\"", oid)).ok()); + r#try!(handle.http_headers(headers).ok()); + r#try!(handle.perform().ok()); + + r#try!(handle.response_code().ok()) == 304 +} diff --git a/src/cargo/sources/mod.rs b/src/cargo/sources/mod.rs index 7db73619311..d96a05639ef 100644 --- a/src/cargo/sources/mod.rs +++ b/src/cargo/sources/mod.rs @@ -1,7 +1,13 @@ -pub use self::path::PathSource; +pub use self::config::SourceConfigMap; +pub use self::directory::DirectorySource; pub use self::git::GitSource; -pub use self::registry::RegistrySource; +pub use self::path::PathSource; +pub use self::registry::{RegistrySource, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; +pub use self::replaced::ReplacedSource; -pub mod path; +pub mod config; +pub mod directory; pub mod git; +pub mod path; pub mod registry; +pub mod replaced; diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 0e3668b42b4..af6d458c345 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -1,85 +1,91 @@ -use std::cmp; use std::fmt::{self, Debug, Formatter}; use std::fs; -use std::io::prelude::*; use std::path::{Path, PathBuf}; use filetime::FileTime; -use git2; -use glob::Pattern; +use ignore::gitignore::GitignoreBuilder; +use ignore::Match; +use log::{trace, warn}; -use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry}; -use ops; -use util::{self, CargoResult, internal, internal_error, human, ChainError}; -use util::Config; +use crate::core::source::MaybePackage; +use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; +use crate::ops; +use crate::util::{internal, paths, CargoResult, CargoResultExt, Config}; pub struct PathSource<'cfg> { - id: SourceId, + source_id: SourceId, path: PathBuf, updated: bool, packages: Vec, config: &'cfg Config, + recursive: bool, } -// TODO: Figure out if packages should be discovered in new or self should be -// mut and packages are discovered in update impl<'cfg> PathSource<'cfg> { - pub fn for_path(path: &Path, config: &'cfg Config) - -> CargoResult> { - trace!("PathSource::for_path; path={}", path.display()); - Ok(PathSource::new(path, &try!(SourceId::for_path(path)), config)) - } - - /// Invoked with an absolute path to a directory that contains a Cargo.toml. - /// The source will read the manifest and find any other packages contained - /// in the directory structure reachable by the root manifest. - pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) - -> PathSource<'cfg> { - trace!("new; id={}", id); - + /// Invoked with an absolute path to a directory that contains a `Cargo.toml`. + /// + /// This source will only return the package at precisely the `path` + /// specified, and it will be an error if there's not a package at `path`. 
diff --git a/src/cargo/sources/mod.rs b/src/cargo/sources/mod.rs
index 7db73619311..d96a05639ef 100644
--- a/src/cargo/sources/mod.rs
+++ b/src/cargo/sources/mod.rs
@@ -1,7 +1,13 @@
-pub use self::path::PathSource;
+pub use self::config::SourceConfigMap;
+pub use self::directory::DirectorySource;
 pub use self::git::GitSource;
-pub use self::registry::RegistrySource;
+pub use self::path::PathSource;
+pub use self::registry::{RegistrySource, CRATES_IO_INDEX, CRATES_IO_REGISTRY};
+pub use self::replaced::ReplacedSource;
 
-pub mod path;
+pub mod config;
+pub mod directory;
 pub mod git;
+pub mod path;
 pub mod registry;
+pub mod replaced;
diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs
index 0e3668b42b4..af6d458c345 100644
--- a/src/cargo/sources/path.rs
+++ b/src/cargo/sources/path.rs
@@ -1,85 +1,91 @@
-use std::cmp;
 use std::fmt::{self, Debug, Formatter};
 use std::fs;
-use std::io::prelude::*;
 use std::path::{Path, PathBuf};
 
 use filetime::FileTime;
-use git2;
-use glob::Pattern;
+use ignore::gitignore::GitignoreBuilder;
+use ignore::Match;
+use log::{trace, warn};
 
-use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry};
-use ops;
-use util::{self, CargoResult, internal, internal_error, human, ChainError};
-use util::Config;
+use crate::core::source::MaybePackage;
+use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary};
+use crate::ops;
+use crate::util::{internal, paths, CargoResult, CargoResultExt, Config};
 
 pub struct PathSource<'cfg> {
-    id: SourceId,
+    source_id: SourceId,
     path: PathBuf,
     updated: bool,
     packages: Vec<Package>,
     config: &'cfg Config,
+    recursive: bool,
 }
 
-// TODO: Figure out if packages should be discovered in new or self should be
-// mut and packages are discovered in update
 impl<'cfg> PathSource<'cfg> {
-    pub fn for_path(path: &Path, config: &'cfg Config)
-                    -> CargoResult<PathSource<'cfg>> {
-        trace!("PathSource::for_path; path={}", path.display());
-        Ok(PathSource::new(path, &try!(SourceId::for_path(path)), config))
-    }
-
-    /// Invoked with an absolute path to a directory that contains a Cargo.toml.
-    /// The source will read the manifest and find any other packages contained
-    /// in the directory structure reachable by the root manifest.
-    pub fn new(path: &Path, id: &SourceId, config: &'cfg Config)
-               -> PathSource<'cfg> {
-        trace!("new; id={}", id);
-
+    /// Invoked with an absolute path to a directory that contains a `Cargo.toml`.
+    ///
+    /// This source will only return the package at precisely the `path`
+    /// specified, and it will be an error if there's not a package at `path`.
+    pub fn new(path: &Path, source_id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
         PathSource {
-            id: id.clone(),
+            source_id,
             path: path.to_path_buf(),
             updated: false,
             packages: Vec::new(),
-            config: config,
+            config,
+            recursive: false,
         }
     }
 
-    pub fn root_package(&self) -> CargoResult<Package> {
+    /// Creates a new source which is walked recursively to discover packages.
+    ///
+    /// This is similar to the `new` method except that instead of requiring a
+    /// valid package to be present at `root` the folder is walked entirely to
+    /// crawl for packages.
+    ///
+    /// Note that this should be used with care and likely shouldn't be chosen
+    /// by default!
+    pub fn new_recursive(root: &Path, id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
+        PathSource {
+            recursive: true,
+            ..PathSource::new(root, id, config)
+        }
+    }
+
+    pub fn preload_with(&mut self, pkg: Package) {
+        assert!(!self.updated);
+        assert!(!self.recursive);
+        assert!(self.packages.is_empty());
+        self.updated = true;
+        self.packages.push(pkg);
+    }
+
+    pub fn root_package(&mut self) -> CargoResult<Package> {
         trace!("root_package; source={:?}", self);
 
-        if !self.updated {
-            return Err(internal("source has not been updated"))
-        }
+        self.update()?;
 
         match self.packages.iter().find(|p| p.root() == &*self.path) {
            Some(pkg) => Ok(pkg.clone()),
-            None => Err(internal("no package found in source"))
+            None => Err(internal("no package found in source")),
        }
    }
 
-    fn read_packages(&self) -> CargoResult<Vec<Package>> {
+    pub fn read_packages(&self) -> CargoResult<Vec<Package>> {
         if self.updated {
             Ok(self.packages.clone())
-        } else if self.id.is_path() && self.id.precise().is_some() {
-            // If our source id is a path and it's listed with a precise
-            // version, then it means that we're not allowed to have nested
-            // dependencies (they've been rewritten to crates.io dependencies)
-            // In this case we specifically read just one package, not a list of
-            // packages.
+        } else if self.recursive {
+            ops::read_packages(&self.path, self.source_id, self.config)
+        } else {
             let path = self.path.join("Cargo.toml");
-            let (pkg, _) = try!(ops::read_package(&path, &self.id,
-                                                  self.config));
+            let (pkg, _) = ops::read_package(&path, self.source_id, self.config)?;
             Ok(vec![pkg])
-        } else {
-            ops::read_packages(&self.path, &self.id, self.config)
         }
     }
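A minimal usage sketch for the API above, replacing the removed `for_path` constructor. `load_root_package` is a hypothetical helper; `SourceId::for_path` is the existing Cargo constructor for path sources:

```rust
use std::path::Path;

// Hypothetical helper showing the intended call sequence with the new API.
fn load_root_package(dir: &Path, config: &Config) -> CargoResult<Package> {
    let id = SourceId::for_path(dir)?;            // source id for a local path
    let mut source = PathSource::new(dir, id, config);
    source.update()?;                             // discovers packages on first use
    source.root_package()                         // the package whose root is `dir`
}
```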
     /// List all files relevant to building this package inside this source.
     ///
-    /// This function will use the appropriate methods to determine what is the
+    /// This function will use the appropriate methods to determine the
     /// set of files underneath this source's directory which are relevant for
     /// building `pkg`.
     ///
@@ -88,124 +94,207 @@ impl<'cfg> PathSource<'cfg> {
     /// use other methods like .gitignore to filter the list of files.
     pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
         let root = pkg.root();
+        let no_include_option = pkg.manifest().include().is_empty();
 
-        let parse = |p: &String| {
-            Pattern::new(p).map_err(|e| {
-                human(format!("could not parse pattern `{}`: {}", p, e))
-            })
+        let mut exclude_builder = GitignoreBuilder::new(root);
+        for rule in pkg.manifest().exclude() {
+            exclude_builder.add_line(None, rule)?;
+        }
+        let ignore_exclude = exclude_builder.build()?;
+
+        let mut include_builder = GitignoreBuilder::new(root);
+        for rule in pkg.manifest().include() {
+            include_builder.add_line(None, rule)?;
+        }
+        let ignore_include = include_builder.build()?;
+
+        let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> {
+            // "Include" and "exclude" options are mutually exclusive.
+            if no_include_option {
+                match ignore_exclude
+                    .matched_path_or_any_parents(relative_path, /* is_dir */ false)
+                {
+                    Match::None => Ok(true),
+                    Match::Ignore(_) => Ok(false),
+                    Match::Whitelist(_) => Ok(true),
+                }
+            } else {
+                match ignore_include
+                    .matched_path_or_any_parents(relative_path, /* is_dir */ false)
+                {
+                    Match::None => Ok(false),
+                    Match::Ignore(_) => Ok(true),
+                    Match::Whitelist(_) => Ok(false),
+                }
+            }
         };
-        let exclude = try!(pkg.manifest().exclude().iter()
-                              .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
-        let include = try!(pkg.manifest().include().iter()
-                              .map(|p| parse(p)).collect::<Result<Vec<_>, _>>());
-
-        let mut filter = |p: &Path| {
-            let relative_path = util::without_prefix(p, &root).unwrap();
-            include.iter().any(|p| p.matches_path(&relative_path)) || {
-                include.len() == 0 &&
-                    !exclude.iter().any(|p| p.matches_path(&relative_path))
-            }
+
+        let mut filter = |path: &Path| -> CargoResult<bool> {
+            let relative_path = path.strip_prefix(root)?;
+
+            let rel = relative_path.as_os_str();
+            if rel == "Cargo.lock" {
+                return Ok(pkg.include_lockfile());
+            } else if rel == "Cargo.toml" {
+                return Ok(true);
+            }
+
+            ignore_should_package(relative_path)
         };
 
-        // If this package is a git repository, then we really do want to query
-        // the git repository as it takes into account items such as .gitignore.
-        // We're not quite sure where the git repository is, however, so we do a
-        // bit of a probe.
+        // Attempt Git-prepopulate only if no `include` (see rust-lang/cargo#4135).
+        if no_include_option {
+            if let Some(result) = self.discover_git_and_list_files(pkg, root, &mut filter) {
+                return result;
+            }
+        }
+        self.list_files_walk(pkg, &mut filter)
+    }
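A standalone sketch (not from the diff) of the matching semantics introduced above: `exclude` rules now behave like a `.gitignore` file, including `!`-prefixed whitelist lines that re-include a previously ignored path.

```rust
use ignore::gitignore::GitignoreBuilder;
use ignore::Match;
use std::path::Path;

// Does `file` get filtered out by rules like `package.exclude = ["tests/*"]`?
fn excluded(root: &Path, rules: &[&str], file: &Path) -> bool {
    let mut builder = GitignoreBuilder::new(root);
    for rule in rules {
        builder.add_line(None, rule).unwrap();
    }
    let matcher = builder.build().unwrap();
    match matcher.matched_path_or_any_parents(file, /* is_dir */ false) {
        Match::Ignore(_) => true,                    // a rule matched: leave it out
        Match::None | Match::Whitelist(_) => false,  // no match, or re-included via `!`
    }
}
```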
+    // Returns `Some(_)` if found sibling `Cargo.toml` and `.git` directory;
+    // otherwise, caller should fall back on full file list.
+    fn discover_git_and_list_files(
+        &self,
+        pkg: &Package,
+        root: &Path,
+        filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
+    ) -> Option<CargoResult<Vec<PathBuf>>> {
+        // If this package is in a Git repository, then we really do want to
+        // query the Git repository as it takes into account items such as
+        // `.gitignore`. We're not quite sure where the Git repository is,
+        // however, so we do a bit of a probe.
         //
-        // We check all packages in this source that are ancestors of the
-        // specified package (including the same package) to see if they're at
-        // the root of the git repository. This isn't always true, but it'll get
-        // us there most of the time!.
-        let repo = self.packages.iter()
-                       .map(|pkg| pkg.root())
-                       .filter(|path| root.starts_with(path))
-                       .filter_map(|path| git2::Repository::open(&path).ok())
-                       .next();
-        match repo {
-            Some(repo) => self.list_files_git(pkg, repo, &mut filter),
-            None => self.list_files_walk(pkg, &mut filter),
+        // We walk this package's path upwards and look for a sibling
+        // `Cargo.toml` and `.git` directory. If we find one then we assume that
+        // we're part of that repository.
+        let mut cur = root;
+        loop {
+            if cur.join("Cargo.toml").is_file() {
+                // If we find a Git repository next to this `Cargo.toml`, we still
+                // check to see if we are indeed part of the index. If not, then
+                // this is likely an unrelated Git repo, so keep going.
+                if let Ok(repo) = git2::Repository::open(cur) {
+                    let index = match repo.index() {
+                        Ok(index) => index,
+                        Err(err) => return Some(Err(err.into())),
+                    };
+                    let path = root.strip_prefix(cur).unwrap().join("Cargo.toml");
+                    if index.get_path(&path, 0).is_some() {
+                        return Some(self.list_files_git(pkg, &repo, filter));
+                    }
+                }
+            }
+            // Don't cross submodule boundaries.
+            if cur.join(".git").is_dir() {
+                break;
+            }
+            match cur.parent() {
+                Some(parent) => cur = parent,
+                None => break,
+            }
         }
+        None
     }
 
-    fn list_files_git(&self, pkg: &Package, repo: git2::Repository,
-                      filter: &mut FnMut(&Path) -> bool)
-                      -> CargoResult<Vec<PathBuf>> {
+    fn list_files_git(
+        &self,
+        pkg: &Package,
+        repo: &git2::Repository,
+        filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
+    ) -> CargoResult<Vec<PathBuf>> {
         warn!("list_files_git {}", pkg.package_id());
-        let index = try!(repo.index());
-        let root = try!(repo.workdir().chain_error(|| {
-            internal_error("Can't list files on a bare repository.", "")
-        }));
+        let index = repo.index()?;
+        let root = repo
+            .workdir()
+            .ok_or_else(|| internal("Can't list files on a bare repository."))?;
         let pkg_path = pkg.root();
 
-        let mut ret = Vec::new();
+        let mut ret = Vec::<PathBuf>::new();
 
-        // We use information from the git repository to guide use in traversing
+        // We use information from the Git repository to guide us in traversing
         // its tree. The primary purpose of this is to take advantage of the
-        // .gitignore and auto-ignore files that don't matter.
+        // `.gitignore` and auto-ignore files that don't matter.
         //
-        // Here we're also careful to look at both tracked an untracked files as
+        // Here we're also careful to look at both tracked and untracked files as
         // the untracked files are often part of a build and may become relevant
         // as part of a future commit.
         let index_files = index.iter().map(|entry| {
-            use libgit2_sys::git_filemode_t::GIT_FILEMODE_COMMIT;
+            use libgit2_sys::GIT_FILEMODE_COMMIT;
             let is_dir = entry.mode == GIT_FILEMODE_COMMIT as u32;
-            (join(&root, &entry.path), Some(is_dir))
+            (join(root, &entry.path), Some(is_dir))
         });
         let mut opts = git2::StatusOptions::new();
         opts.include_untracked(true);
-        if let Some(suffix) = util::without_prefix(pkg_path, &root) {
+        if let Ok(suffix) = pkg_path.strip_prefix(root) {
             opts.pathspec(suffix);
         }
-        let statuses = try!(repo.statuses(Some(&mut opts)));
-        let untracked = statuses.iter().map(|entry| {
-            (join(&root, entry.path_bytes()), None)
+        let statuses = repo.statuses(Some(&mut opts))?;
+        let untracked = statuses.iter().filter_map(|entry| match entry.status() {
+            // Don't include Cargo.lock if it is untracked. Packaging will
+            // generate a new one as needed.
+            git2::Status::WT_NEW if entry.path() != Some("Cargo.lock") => {
+                Some((join(root, entry.path_bytes()), None))
+            }
+            _ => None,
         });
 
-        'outer: for (file_path, is_dir) in index_files.chain(untracked) {
-            let file_path = try!(file_path);
+        let mut subpackages_found = Vec::new();
 
-            // Filter out files outside this package.
-            if !file_path.starts_with(pkg_path) { continue }
+        for (file_path, is_dir) in index_files.chain(untracked) {
+            let file_path = file_path?;
 
-            // Filter out Cargo.lock and target always
-            {
-                let fname = file_path.file_name().and_then(|s| s.to_str());
-                if fname == Some("Cargo.lock") { continue }
-                if fname == Some("target") { continue }
+            // Filter out files blatantly outside this package. This is helped a
+            // bit above via the `pathspec` function call, but we need to filter
+            // the entries in the index as well.
+            if !file_path.starts_with(pkg_path) {
+                continue;
             }
 
-            // Filter out sub-packages of this package
-            for other_pkg in self.packages.iter().filter(|p| *p != pkg) {
-                let other_path = other_pkg.root();
-                if other_path.starts_with(pkg_path) &&
-                   file_path.starts_with(other_path) {
-                    continue 'outer;
+            match file_path.file_name().and_then(|s| s.to_str()) {
+                // The `target` directory is never included.
+                Some("target") => continue,
+
+                // Keep track of all sub-packages found and also strip out all
+                // matches we've found so far. Note, though, that if we find
+                // our own `Cargo.toml`, we keep going.
+                Some("Cargo.toml") => {
+                    let path = file_path.parent().unwrap();
+                    if path != pkg_path {
+                        warn!("subpackage found: {}", path.display());
+                        ret.retain(|p| !p.starts_with(path));
+                        subpackages_found.push(path.to_path_buf());
+                        continue;
+                    }
                 }
+
+                _ => {}
             }
 
-            let is_dir = is_dir.or_else(|| {
-                fs::metadata(&file_path).ok().map(|m| m.is_dir())
-            }).unwrap_or(false);
-            if is_dir {
+            // If this file is part of any other sub-package we've found so far,
+            // skip it.
+            if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
+                continue;
+            }
+
+            if is_dir.unwrap_or_else(|| file_path.is_dir()) {
                 warn!("  found submodule {}", file_path.display());
-                let rel = util::without_prefix(&file_path, &root).unwrap();
-                let rel = try!(rel.to_str().chain_error(|| {
-                    human(format!("invalid utf-8 filename: {}", rel.display()))
-                }));
+                let rel = file_path.strip_prefix(root)?;
+                let rel = rel.to_str().ok_or_else(|| {
+                    failure::format_err!("invalid utf-8 filename: {}", rel.display())
+                })?;
                 // Git submodules are currently only named through `/` path
                 // separators, explicitly not `\` which windows uses. Who knew?
                 let rel = rel.replace(r"\", "/");
                 match repo.find_submodule(&rel).and_then(|s| s.open()) {
                     Ok(repo) => {
-                        let files = try!(self.list_files_git(pkg, repo, filter));
+                        let files = self.list_files_git(pkg, &repo, filter)?;
                         ret.extend(files.into_iter());
                     }
                     Err(..) => {
-                        try!(PathSource::walk(&file_path, &mut ret, false,
-                                              filter));
+                        PathSource::walk(&file_path, &mut ret, false, filter)?;
                     }
                 }
-            } else if (*filter)(&file_path) {
+            } else if (*filter)(&file_path)? {
                 // We found a file!
                 warn!("  found {}", file_path.display());
                 ret.push(file_path);
@@ -215,8 +304,8 @@ impl<'cfg> PathSource<'cfg> {
 
     #[cfg(unix)]
     fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
-        use std::os::unix::prelude::*;
         use std::ffi::OsStr;
+        use std::os::unix::prelude::*;
         Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))
     }
     #[cfg(windows)]
@@ -224,65 +313,132 @@ impl<'cfg> PathSource<'cfg> {
         use std::str;
         match str::from_utf8(data) {
             Ok(s) => Ok(path.join(s)),
-            Err(..) => Err(internal("cannot process path in git with a non \
-                                     unicode filename")),
+            Err(..) => Err(internal(
+                "cannot process path in git with a non \
+                 unicode filename",
+            )),
         }
     }
 }
 
-    fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool)
-                       -> CargoResult<Vec<PathBuf>> {
+    fn list_files_walk(
+        &self,
+        pkg: &Package,
+        filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
+    ) -> CargoResult<Vec<PathBuf>> {
         let mut ret = Vec::new();
-        for pkg in self.packages.iter().filter(|p| *p == pkg) {
-            let loc = pkg.root();
-            try!(PathSource::walk(loc, &mut ret, true, filter));
-        }
-        return Ok(ret);
+        PathSource::walk(pkg.root(), &mut ret, true, filter)?;
+        Ok(ret)
     }
 
-    fn walk(path: &Path, ret: &mut Vec<PathBuf>,
-            is_root: bool, filter: &mut FnMut(&Path) -> bool) -> CargoResult<()>
-    {
+    fn walk(
+        path: &Path,
+        ret: &mut Vec<PathBuf>,
+        is_root: bool,
+        filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
+    ) -> CargoResult<()> {
         if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
-            if (*filter)(path) {
+            if (*filter)(path)? {
                 ret.push(path.to_path_buf());
             }
-            return Ok(())
+            return Ok(());
         }
-        // Don't recurse into any sub-packages that we have
+        // Don't recurse into any sub-packages that we have.
         if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
-            return Ok(())
+            return Ok(());
         }
-        for dir in try!(fs::read_dir(path)) {
-            let dir = try!(dir).path();
-            match (is_root, dir.file_name().and_then(|s| s.to_str())) {
-                (_, Some(".git")) |
-                (true, Some("target")) |
-                (true, Some("Cargo.lock")) => continue,
-                _ => {}
+
+        // For package integration tests, we need to sort the paths in a deterministic order to
+        // be able to match stdout warnings in the same order.
+        //
+        // TODO: drop `collect` and sort after transition period and dropping warning tests.
+        // See rust-lang/cargo#4268 and rust-lang/cargo#4270.
+        let mut entries: Vec<PathBuf> = fs::read_dir(path)
+            .chain_err(|| format!("cannot read {:?}", path))?
+            .map(|e| e.unwrap().path())
+            .collect();
+        entries.sort_unstable_by(|a, b| a.as_os_str().cmp(b.as_os_str()));
+        for path in entries {
+            let name = path.file_name().and_then(|s| s.to_str());
+            // Skip dotfile directories.
+            if name.map(|s| s.starts_with('.')) == Some(true) {
+                continue;
+            }
+            if is_root && name == Some("target") {
+                // Skip Cargo artifacts.
+                continue;
             }
-            try!(PathSource::walk(&dir, ret, false, filter));
+            PathSource::walk(&path, ret, false, filter)?;
+        }
+        Ok(())
+    }
+
+    pub fn last_modified_file(&self, pkg: &Package) -> CargoResult<(FileTime, PathBuf)> {
+        if !self.updated {
+            return Err(internal("BUG: source was not updated"));
+        }
+
+        let mut max = FileTime::zero();
+        let mut max_path = PathBuf::new();
+        for file in self.list_files(pkg)? {
+            // An `fs::stat` error here is either because path is a
+            // broken symlink, a permissions error, or a race
+            // condition where this path was `rm`-ed -- either way,
+            // we can ignore the error and treat the path's `mtime`
+            // as `0`.
+            let mtime = paths::mtime(&file).unwrap_or_else(|_| FileTime::zero());
+            if mtime > max {
+                max = mtime;
+                max_path = file;
            }
         }
-        return Ok(())
+        trace!("last modified file {}: {}", self.path.display(), max);
+        Ok((max, max_path))
+    }
+
+    pub fn path(&self) -> &Path {
+        &self.path
     }
 }
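A standalone sketch of the fingerprint scheme built on `last_modified_file`: a path source's fingerprint is the newest mtime over all packaged files, formatted together with the file's path (as the new `fingerprint` below does). `mtime_fingerprint` is a hypothetical free function, not part of the diff:

```rust
use filetime::FileTime;
use std::fs;
use std::path::PathBuf;

fn mtime_fingerprint(files: &[PathBuf]) -> String {
    let mut max = FileTime::zero();
    let mut max_path = PathBuf::new();
    for file in files {
        // Broken symlinks and races are treated as mtime 0, as in the diff.
        let mtime = fs::metadata(file)
            .map(|m| FileTime::from_last_modification_time(&m))
            .unwrap_or_else(|_| FileTime::zero());
        if mtime > max {
            max = mtime;
            max_path = file.clone();
        }
    }
    format!("{} ({})", max, max_path.display())
}
```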
 impl<'cfg> Debug for PathSource<'cfg> {
-    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
         write!(f, "the paths source")
     }
 }
 
-impl<'cfg> Registry for PathSource<'cfg> {
-    fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
-        self.packages.query(dep)
+impl<'cfg> Source for PathSource<'cfg> {
+    fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        for s in self.packages.iter().map(|p| p.summary()) {
+            if dep.matches(s) {
+                f(s.clone())
+            }
+        }
+        Ok(())
+    }
+
+    fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+        for s in self.packages.iter().map(|p| p.summary()) {
+            f(s.clone())
+        }
+        Ok(())
+    }
+
+    fn supports_checksums(&self) -> bool {
+        false
+    }
+
+    fn requires_precise(&self) -> bool {
+        false
+    }
+
+    fn source_id(&self) -> SourceId {
+        self.source_id
     }
-}
 
-impl<'cfg> Source for PathSource<'cfg> {
     fn update(&mut self) -> CargoResult<()> {
         if !self.updated {
-            let packages = try!(self.read_packages());
+            let packages = self.read_packages()?;
             self.packages.extend(packages.into_iter());
             self.updated = true;
         }
@@ -290,39 +446,34 @@ impl<'cfg> Source for PathSource<'cfg> {
         Ok(())
     }
 
-    fn download(&mut self, _: &[PackageId]) -> CargoResult<()> {
-        // TODO: assert! that the PackageId is contained by the source
-        Ok(())
-    }
+    fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+        trace!("getting packages; id={}", id);
 
-    fn get(&self, ids: &[PackageId]) -> CargoResult<Vec<Package>> {
-        trace!("getting packages; ids={:?}", ids);
+        let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
+        pkg.cloned()
+            .map(MaybePackage::Ready)
+            .ok_or_else(|| internal(format!("failed to find {} in path source", id)))
+    }
 
-        Ok(self.packages.iter()
-                        .filter(|pkg| ids.iter().any(|id| pkg.package_id() == id))
-                        .map(|pkg| pkg.clone())
-                        .collect())
+    fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
+        panic!("no download should have started")
     }
 
     fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
-        if !self.updated {
-            return Err(internal_error("BUG: source was not updated", ""));
-        }
+        let (max, max_path) = self.last_modified_file(pkg)?;
+        Ok(format!("{} ({})", max, max_path.display()))
+    }
 
-        let mut max = FileTime::zero();
-        for file in try!(self.list_files(pkg)).iter() {
-            // An fs::stat error here is either because path is a
-            // broken symlink, a permissions error, or a race
-            // condition where this path was rm'ed - either way,
-            // we can ignore the error and treat the path's mtime
-            // as 0.
-            let mtime = fs::metadata(file).map(|meta| {
-                FileTime::from_last_modification_time(&meta)
-            }).unwrap_or(FileTime::zero());
-            warn!("{} {}", mtime, file.display());
-            max = cmp::max(max, mtime);
+    fn describe(&self) -> String {
+        match self.source_id.url().to_file_path() {
+            Ok(path) => path.display().to_string(),
+            Err(_) => self.source_id.to_string(),
         }
-        trace!("fingerprint {}: {}", self.path.display(), max);
-        Ok(max.to_string())
+    }
+
+    fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {}
+
+    fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult<bool> {
+        Ok(false)
+    }
 }
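The callback-based `Source::query` above replaces the old `Registry::query` that returned a `Vec<Summary>`. A minimal sketch (not from the diff) of how a caller can recover the old vector-returning behavior:

```rust
// Collect all summaries a source reports for a dependency.
fn matching_summaries(src: &mut dyn Source, dep: &Dependency) -> CargoResult<Vec<Summary>> {
    let mut hits = Vec::new();
    src.query(dep, &mut |summary| hits.push(summary))?;
    Ok(hits)
}
```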
diff --git a/src/cargo/sources/registry.rs b/src/cargo/sources/registry.rs
deleted file mode 100644
index b9754852d11..00000000000
--- a/src/cargo/sources/registry.rs
+++ /dev/null
@@ -1,572 +0,0 @@
-//! A `Source` for registry-based packages.
-//!
-//! # What's a Registry?
-//!
-//! Registries are central locations where packages can be uploaded to,
-//! discovered, and searched for. The purpose of a registry is to have a
-//! location that serves as permanent storage for versions of a crate over time.
-//!
-//! Compared to git sources, a registry provides many packages as well as many
-//! versions simultaneously. Git sources can also have commits deleted through
-//! rebasings where registries cannot have their versions deleted.
-//!
-//! # The Index of a Registry
-//!
-//! One of the major difficulties with a registry is that hosting so many
-//! packages may quickly run into performance problems when dealing with
-//! dependency graphs. It's infeasible for cargo to download the entire contents
-//! of the registry just to resolve one package's dependencies, for example. As
-//! a result, cargo needs some efficient method of querying what packages are
-//! available on a registry, what versions are available, and what the
-//! dependencies for each version is.
-//!
-//! One method of doing so would be having the registry expose an HTTP endpoint
-//! which can be queried with a list of packages and a response of their
-//! dependencies and versions is returned. This is somewhat inefficient however
-//! as we may have to hit the endpoint many times and we may have already
-//! queried for much of the data locally already (for other packages, for
-//! example). This also involves inventing a transport format between the
-//! registry and Cargo itself, so this route was not taken.
-//!
-//! Instead, Cargo communicates with registries through a git repository
-//! referred to as the Index. The Index of a registry is essentially an easily
-//! query-able version of the registry's database for a list of versions of a
-//! package as well as a list of dependencies for each version.
-//!
-//! Using git to host this index provides a number of benefits:
-//!
-//! * The entire index can be stored efficiently locally on disk. This means
-//!   that all queries of a registry can happen locally and don't need to touch
-//!   the network.
-//!
-//! * Updates of the index are quite efficient. Using git buys incremental
-//!   updates, compressed transmission, etc for free. The index must be updated
-//!   each time we need fresh information from a registry, but this is one
-//!   update of a git repository that probably hasn't changed a whole lot so
-//!   it shouldn't be too expensive.
-//!
-//!   Additionally, each modification to the index is just appending a line at
-//!   the end of a file (the exact format is described later). This means that
-//!   the commits for an index are quite small and easily applied/compressable.
-//!
-//! ## The format of the Index
-//!
-//! The index is a store for the list of versions for all packages known, so its
-//! format on disk is optimized slightly to ensure that `ls registry` doesn't
-//! produce a list of all packages ever known. The index also wants to ensure
-//! that there's not a million files which may actually end up hitting
-//! filesystem limits at some point. To this end, a few decisions were made
-//! about the format of the registry:
-//!
-//! 1. Each crate will have one file corresponding to it. Each version for a
-//!    crate will just be a line in this file.
-//! 2. There will be two tiers of directories for crate names, under which
-//!    crates corresponding to those tiers will be located.
-//!
-//! As an example, this is an example hierarchy of an index:
-//!
-//! ```notrust
-//! .
-//! ├── 3
-//! │   └── u
-//! │       └── url
-//! ├── bz
-//! │   └── ip
-//! │       └── bzip2
-//! ├── config.json
-//! ├── en
-//! │   └── co
-//! │       └── encoding
-//! └── li
-//!     ├── bg
-//!     │   └── libgit2
-//!     └── nk
-//!         └── link-config
-//! ```
-//!
-//! The root of the index contains a `config.json` file with a few entries
-//! corresponding to the registry (see `RegistryConfig` below).
-//!
-//! Otherwise, there are three numbered directories (1, 2, 3) for crates with
-//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the
-//! crate files underneath them, while the 3 directory is sharded by the first
-//! letter of the crate name.
-//!
-//! Otherwise the top-level directory contains many two-letter directory names,
-//! each of which has many sub-folders with two letters. At the end of all these
-//! are the actual crate files themselves.
-//!
-//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as
-//! efficient lookup based on the crate name itself.
-//!
-//! ## Crate files
-//!
-//! Each file in the index is the history of one crate over time. Each line in
-//! the file corresponds to one version of a crate, stored in JSON format (see
-//! the `RegistryPackage` structure below).
-//!
-//! As new versions are published, new lines are appended to this file. The only
-//! modifications to this file that should happen over time are yanks of a
-//! particular version.
-//!
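The sharding scheme described above maps directly to a small path computation. A standalone sketch (`index_file_path` is a hypothetical name; the same logic reappears in `load_summaries` in the new `index.rs` below):

```rust
// Where a crate's file lives relative to the index root.
fn index_file_path(name: &str) -> String {
    let name = name.to_lowercase();
    match name.len() {
        1 => format!("1/{}", name),
        2 => format!("2/{}", name),
        3 => format!("3/{}/{}", &name[..1], name),
        _ => format!("{}/{}/{}", &name[0..2], &name[2..4], name),
    }
}

// e.g. index_file_path("url")     == "3/u/url"
//      index_file_path("libgit2") == "li/bg/libgit2"
```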
-//! # Downloading Packages
-//!
-//! The purpose of the Index was to provide an efficient method to resolve the
-//! dependency graph for a package. So far we only required one network
-//! interaction to update the registry's repository (yay!). After resolution has
-//! been performed, however we need to download the contents of packages so we
-//! can read the full manifest and build the source code.
-//!
-//! To accomplish this, this source's `download` method will make an HTTP
-//! request per-package requested to download tarballs into a local cache. These
-//! tarballs will then be unpacked into a destination folder.
-//!
-//! Note that because versions uploaded to the registry are frozen forever that
-//! the HTTP download and unpacking can all be skipped if the version has
-//! already been downloaded and unpacked. This caching allows us to only
-//! download a package when absolutely necessary.
-//!
-//! # Filesystem Hierarchy
-//!
-//! Overall, the `$HOME/.cargo` looks like this when talking about the registry:
-//!
-//! ```notrust
-//! # A folder under which all registry metadata is hosted (similar to
-//! # $HOME/.cargo/git)
-//! $HOME/.cargo/registry/
-//!
-//!     # For each registry that cargo knows about (keyed by hostname + hash)
-//!     # there is a folder which is the checked out version of the index for
-//!     # the registry in this location. Note that this is done so cargo can
-//!     # support multiple registries simultaneously
-//!     index/
-//!         registry1-<hash>/
-//!         registry2-<hash>/
-//!         ...
-//!
-//!     # This folder is a cache for all downloaded tarballs from a registry.
-//!     # Once downloaded and verified, a tarball never changes.
-//!     cache/
-//!         registry1-<hash>/<pkg>-<version>.crate
-//!         ...
-//!
-//!     # Location in which all tarballs are unpacked. Each tarball is known to
-//!     # be frozen after downloading, so transitively this folder is also
-//!     # frozen once its unpacked (it's never unpacked again)
-//!     src/
-//!         registry1-<hash>/<pkg>-<version>/...
-//!         ...
-//! ```
-
-use std::collections::HashMap;
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::path::PathBuf;
-
-use curl::http;
-use flate2::read::GzDecoder;
-use git2;
-use rustc_serialize::hex::ToHex;
-use rustc_serialize::json;
-use tar::Archive;
-use url::Url;
-
-use core::{Source, SourceId, PackageId, Package, Summary, Registry};
-use core::dependency::{Dependency, DependencyInner, Kind};
-use sources::{PathSource, git};
-use util::{CargoResult, Config, internal, ChainError, ToUrl, human};
-use util::{hex, Sha256};
-use ops;
-
-static DEFAULT: &'static str = "https://github.com/rust-lang/crates.io-index";
-
-pub struct RegistrySource<'cfg> {
-    source_id: SourceId,
-    checkout_path: PathBuf,
-    cache_path: PathBuf,
-    src_path: PathBuf,
-    config: &'cfg Config,
-    handle: Option<http::Handle>,
-    sources: Vec<PathSource<'cfg>>,
-    hashes: HashMap<(String, String), String>, // (name, vers) => cksum
-    cache: HashMap<String, Vec<(Summary, bool)>>,
-    updated: bool,
-}
-
-#[derive(RustcDecodable)]
-pub struct RegistryConfig {
-    /// Download endpoint for all crates. This will be appended with
-    /// `/<crate>/<version>/download` and then will be hit with an HTTP GET
-    /// request to download the tarball for a crate.
-    pub dl: String,
-
-    /// API endpoint for the registry. This is what's actually hit to perform
-    /// operations like yanks, owner modifications, publish new crates, etc.
-    pub api: String,
-}
-
-#[derive(RustcDecodable)]
-struct RegistryPackage {
-    name: String,
-    vers: String,
-    deps: Vec<RegistryDependency>,
-    features: HashMap<String, Vec<String>>,
-    cksum: String,
-    yanked: Option<bool>,
-}
-
-#[derive(RustcDecodable)]
-struct RegistryDependency {
-    name: String,
-    req: String,
-    features: Vec<String>,
-    optional: bool,
-    default_features: bool,
-    target: Option<String>,
-    kind: Option<String>,
-}
-
-impl<'cfg> RegistrySource<'cfg> {
-    pub fn new(source_id: &SourceId,
-               config: &'cfg Config) -> RegistrySource<'cfg> {
-        let hash = hex::short_hash(source_id);
-        let ident = source_id.url().host().unwrap().to_string();
-        let part = format!("{}-{}", ident, hash);
-        RegistrySource {
-            checkout_path: config.registry_index_path().join(&part),
-            cache_path: config.registry_cache_path().join(&part),
-            src_path: config.registry_source_path().join(&part),
-            config: config,
-            source_id: source_id.clone(),
-            handle: None,
-            sources: Vec::new(),
-            hashes: HashMap::new(),
-            cache: HashMap::new(),
-            updated: false,
-        }
-    }
-
-    /// Get the configured default registry URL.
-    ///
-    /// This is the main cargo registry by default, but it can be overridden in
-    /// a .cargo/config
-    pub fn url(config: &Config) -> CargoResult<Url> {
-        let config = try!(ops::registry_configuration(config));
-        let url = config.index.unwrap_or(DEFAULT.to_string());
-        url.to_url().map_err(human)
-    }
-
-    /// Get the default url for the registry
-    pub fn default_url() -> String {
-        DEFAULT.to_string()
-    }
-
-    /// Decode the configuration stored within the registry.
-    ///
-    /// This requires that the index has been at least checked out.
-    pub fn config(&self) -> CargoResult<RegistryConfig> {
-        let mut f = try!(File::open(&self.checkout_path.join("config.json")));
-        let mut contents = String::new();
-        try!(f.read_to_string(&mut contents));
-        let config = try!(json::decode(&contents));
-        Ok(config)
-    }
-
-    /// Open the git repository for the index of the registry.
-    ///
-    /// This will attempt to open an existing checkout, and failing that it will
-    /// initialize a fresh new directory and git checkout. No remotes will be
-    /// configured by default.
-    fn open(&self) -> CargoResult<git2::Repository> {
-        match git2::Repository::open(&self.checkout_path) {
-            Ok(repo) => return Ok(repo),
-            Err(..) => {}
-        }
-
-        try!(fs::create_dir_all(&self.checkout_path));
-        let _ = fs::remove_dir_all(&self.checkout_path);
-        let repo = try!(git2::Repository::init(&self.checkout_path));
-        Ok(repo)
-    }
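For reference, a sketch of the `config.json` that `config()` decodes into `RegistryConfig` above. The URLs shown are crates.io's publicly documented values, included here only as an illustration:

```rust
// What the index's `config.json` looks like for crates.io.
const EXAMPLE_CONFIG: &str = r#"{
    "dl": "https://crates.io/api/v1/crates",
    "api": "https://crates.io"
}"#;
```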
-    /// Download the given package from the given url into the local cache.
-    ///
-    /// This will perform the HTTP request to fetch the package. This function
-    /// will only succeed if the HTTP download was successful and the file is
-    /// then ready for inspection.
-    ///
-    /// No action is taken if the package is already downloaded.
-    fn download_package(&mut self, pkg: &PackageId, url: &Url)
-                        -> CargoResult<PathBuf> {
-        // TODO: should discover filename from the S3 redirect
-        let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
-        let dst = self.cache_path.join(&filename);
-        if fs::metadata(&dst).is_ok() { return Ok(dst) }
-        try!(self.config.shell().status("Downloading", pkg));
-
-        try!(fs::create_dir_all(dst.parent().unwrap()));
-        let expected_hash = try!(self.hash(pkg));
-        let handle = match self.handle {
-            Some(ref mut handle) => handle,
-            None => {
-                self.handle = Some(try!(ops::http_handle(self.config)));
-                self.handle.as_mut().unwrap()
-            }
-        };
-        // TODO: don't download into memory (curl-rust doesn't expose it)
-        let resp = try!(handle.get(url.to_string()).follow_redirects(true).exec());
-        if resp.get_code() != 200 && resp.get_code() != 0 {
-            return Err(internal(format!("Failed to get 200 response from {}\n{}",
-                                        url, resp)))
-        }
-
-        // Verify what we just downloaded
-        let actual = {
-            let mut state = Sha256::new();
-            state.update(resp.get_body());
-            state.finish()
-        };
-        if actual.to_hex() != expected_hash {
-            return Err(human(format!("Failed to verify the checksum of `{}`",
-                                     pkg)))
-        }
-
-        try!(try!(File::create(&dst)).write_all(resp.get_body()));
-        Ok(dst)
-    }
-
-    /// Return the hash listed for a specified PackageId.
-    fn hash(&mut self, pkg: &PackageId) -> CargoResult<String> {
-        let key = (pkg.name().to_string(), pkg.version().to_string());
-        if let Some(s) = self.hashes.get(&key) {
-            return Ok(s.clone())
-        }
-        // Ok, we're missing the key, so parse the index file to load it.
-        try!(self.summaries(pkg.name()));
-        self.hashes.get(&key).chain_error(|| {
-            internal(format!("no hash listed for {}", pkg))
-        }).map(|s| s.clone())
-    }
-
-    /// Unpacks a downloaded package into a location where it's ready to be
-    /// compiled.
-    ///
-    /// No action is taken if the source looks like it's already unpacked.
-    fn unpack_package(&self, pkg: &PackageId, tarball: PathBuf)
-                      -> CargoResult<PathBuf> {
-        let dst = self.src_path.join(&format!("{}-{}", pkg.name(),
-                                              pkg.version()));
-        if fs::metadata(&dst.join(".cargo-ok")).is_ok() { return Ok(dst) }
-
-        try!(fs::create_dir_all(dst.parent().unwrap()));
-        let f = try!(File::open(&tarball));
-        let gz = try!(GzDecoder::new(f));
-        let mut tar = Archive::new(gz);
-        try!(tar.unpack(dst.parent().unwrap()));
-        try!(File::create(&dst.join(".cargo-ok")));
-        Ok(dst)
-    }
-    /// Parse the on-disk metadata for the package provided
-    fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> {
-        if self.cache.contains_key(name) {
-            return Ok(self.cache.get(name).unwrap());
-        }
-        // see module comment for why this is structured the way it is
-        let path = self.checkout_path.clone();
-        let fs_name = name.chars().flat_map(|c| c.to_lowercase()).collect::<String>();
-        let path = match fs_name.len() {
-            1 => path.join("1").join(&fs_name),
-            2 => path.join("2").join(&fs_name),
-            3 => path.join("3").join(&fs_name[..1]).join(&fs_name),
-            _ => path.join(&fs_name[0..2])
-                     .join(&fs_name[2..4])
-                     .join(&fs_name),
-        };
-        let summaries = match File::open(&path) {
-            Ok(mut f) => {
-                let mut contents = String::new();
-                try!(f.read_to_string(&mut contents));
-                let ret: CargoResult<Vec<(Summary, bool)>>;
-                ret = contents.lines().filter(|l| l.trim().len() > 0)
-                              .map(|l| self.parse_registry_package(l))
-                              .collect();
-                try!(ret.chain_error(|| {
-                    internal(format!("Failed to parse registry's information \
-                                      for: {}", name))
-                }))
-            }
-            Err(..) => Vec::new(),
-        };
-        let summaries = summaries.into_iter().filter(|summary| {
-            summary.0.package_id().name() == name
-        }).collect();
-        self.cache.insert(name.to_string(), summaries);
-        Ok(self.cache.get(name).unwrap())
-    }
-
-    /// Parse a line from the registry's index file into a Summary for a
-    /// package.
-    ///
-    /// The returned boolean is whether or not the summary has been yanked.
-    fn parse_registry_package(&mut self, line: &str)
-                              -> CargoResult<(Summary, bool)> {
-        let RegistryPackage {
-            name, vers, cksum, deps, features, yanked
-        } = try!(json::decode::<RegistryPackage>(line));
-        let pkgid = try!(PackageId::new(&name, &vers, &self.source_id));
-        let deps: CargoResult<Vec<Dependency>> = deps.into_iter().map(|dep| {
-            self.parse_registry_dependency(dep)
-        }).collect();
-        let deps = try!(deps);
-        self.hashes.insert((name, vers), cksum);
-        Ok((try!(Summary::new(pkgid, deps, features)), yanked.unwrap_or(false)))
-    }
-
-    /// Converts an encoded dependency in the registry to a cargo dependency
-    fn parse_registry_dependency(&self, dep: RegistryDependency)
-                                 -> CargoResult<Dependency> {
-        let RegistryDependency {
-            name, req, features, optional, default_features, target, kind
-        } = dep;
-
-        let dep = try!(DependencyInner::parse(&name, Some(&req),
-                                              &self.source_id));
-        let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") {
-            "dev" => Kind::Development,
-            "build" => Kind::Build,
-            _ => Kind::Normal,
-        };
-
-        // Unfortunately older versions of cargo and/or the registry ended up
-        // publishing lots of entries where the features array contained the
-        // empty feature, "", inside. This confuses the resolution process much
-        // later on and these features aren't actually valid, so filter them all
-        // out here.
-        let features = features.into_iter().filter(|s| !s.is_empty()).collect();
-
-        Ok(dep.set_optional(optional)
-              .set_default_features(default_features)
-              .set_features(features)
-              .set_only_for_platform(target)
-              .set_kind(kind)
-              .into_dependency())
-    }
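To make the parsing above concrete, here is an illustrative index line in the `RegistryPackage` shape (the `cksum` is a truncated placeholder, not a real digest), decoded the same way `parse_registry_package` does:

```rust
fn decode_example() -> CargoResult<()> {
    // Illustrative line only; real index lines carry full dependency lists
    // and a complete SHA-256 checksum.
    let line = r#"{"name":"foo","vers":"0.1.0","deps":[],"features":{},"cksum":"beef...","yanked":false}"#;
    let pkg: RegistryPackage = try!(json::decode(line));
    assert_eq!(pkg.vers, "0.1.0");
    Ok(())
}
```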
-    /// Actually perform network operations to update the registry
-    fn do_update(&mut self) -> CargoResult<()> {
-        if self.updated { return Ok(()) }
-
-        try!(self.config.shell().status("Updating",
-                                        format!("registry `{}`", self.source_id.url())));
-        let repo = try!(self.open());
-
-        // git fetch origin
-        let url = self.source_id.url().to_string();
-        let refspec = "refs/heads/*:refs/remotes/origin/*";
-        try!(git::fetch(&repo, &url, refspec).chain_error(|| {
-            internal(format!("failed to fetch `{}`", url))
-        }));
-
-        // git reset --hard origin/master
-        let reference = "refs/remotes/origin/master";
-        let oid = try!(repo.refname_to_id(reference));
-        trace!("[{}] updating to rev {}", self.source_id, oid);
-        let object = try!(repo.find_object(oid, None));
-        try!(repo.reset(&object, git2::ResetType::Hard, None));
-        self.updated = true;
-        self.cache.clear();
-        Ok(())
-    }
-}
-
-impl<'cfg> Registry for RegistrySource<'cfg> {
-    fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
-        // If this is a precise dependency, then it came from a lockfile and in
-        // theory the registry is known to contain this version. If, however, we
-        // come back with no summaries, then our registry may need to be
-        // updated, so we fall back to performing a lazy update.
-        if dep.source_id().precise().is_some() {
-            let mut summaries = try!(self.summaries(dep.name())).iter().map(|s| {
-                s.0.clone()
-            }).collect::<Vec<_>>();
-            if try!(summaries.query(dep)).len() == 0 {
-                try!(self.do_update());
-            }
-        }
-
-        let mut summaries = {
-            let summaries = try!(self.summaries(dep.name()));
-            summaries.iter().filter(|&&(_, yanked)| {
-                dep.source_id().precise().is_some() || !yanked
-            }).map(|s| s.0.clone()).collect::<Vec<_>>()
-        };
-
-        // Handle `cargo update --precise` here. If specified, our own source
-        // will have a precise version listed of the form `<name>=<version>` where
-        // `<name>` is the name of a crate on this source and `<version>` is the
-        // version requested (agument to `--precise`).
-        summaries.retain(|s| {
-            match self.source_id.precise() {
-                Some(p) if p.starts_with(dep.name()) => {
-                    let vers = &p[dep.name().len() + 1..];
-                    s.version().to_string() == vers
-                }
-                _ => true,
-            }
-        });
-        summaries.query(dep)
-    }
-}
-
-impl<'cfg> Source for RegistrySource<'cfg> {
-    fn update(&mut self) -> CargoResult<()> {
-        // If we have an imprecise version then we don't know what we're going
-        // to look for, so we always attempt to perform an update here.
-        //
-        // If we have a precise version, then we'll update lazily during the
-        // querying phase. Note that precise in this case is only
-        // `Some("locked")` as other `Some` values indicate a `cargo update
-        // --precise` request
-        if self.source_id.precise() != Some("locked") {
-            try!(self.do_update());
-        }
-        Ok(())
-    }
-
-    fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> {
-        let config = try!(self.config());
-        let url = try!(config.dl.to_url().map_err(internal));
-        for package in packages.iter() {
-            if self.source_id != *package.source_id() { continue }
-
-            let mut url = url.clone();
-            url.path_mut().unwrap().push(package.name().to_string());
-            url.path_mut().unwrap().push(package.version().to_string());
-            url.path_mut().unwrap().push("download".to_string());
-            let path = try!(self.download_package(package, &url).chain_error(|| {
-                internal(format!("Failed to download package `{}` from {}",
-                                 package, url))
-            }));
-            let path = try!(self.unpack_package(package, path).chain_error(|| {
-                internal(format!("Failed to unpack package `{}`", package))
-            }));
-            let mut src = PathSource::new(&path, &self.source_id, self.config);
-            try!(src.update());
-            self.sources.push(src);
-        }
-        Ok(())
-    }
-
-    fn get(&self, packages: &[PackageId]) -> CargoResult<Vec<Package>> {
-        let mut ret = Vec::new();
-        for src in self.sources.iter() {
-            ret.extend(try!(src.get(packages)).into_iter());
-        }
-        return Ok(ret);
-    }
-
-    fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
-        Ok(pkg.package_id().version().to_string())
-    }
-}
diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs
new file mode 100644
index 00000000000..57d24a8f0aa
--- /dev/null
+++ b/src/cargo/sources/registry/index.rs
@@ -0,0 +1,762 @@
+//! Management of the index of a registry source
+//!
+//! This module contains management of the index and various operations, such as
+//! actually parsing the index, looking for crates, etc. This is intended to be
+//! abstract over remote indices (downloaded via git) and local registry indices
+//! (which are all just present on the filesystem).
+//!
+//! ## Index Performance
+//!
+//! One important aspect of the index is that we want to optimize the "happy
+//! path" as much as possible. Whenever you type `cargo build` Cargo will
+//! *always* reparse the registry and learn about dependency information. This
+//! is done because Cargo needs to learn about the upstream crates.io crates
+//! that you're using and ensure that the preexisting `Cargo.lock` still matches
+//! the current state of the world.
+//!
+//! Consequently, Cargo "null builds" (the index that Cargo adds to each build
+//! itself) need to be fast when accessing the index. The primary performance
+//! optimization here is to avoid parsing JSON blobs from the registry if we
+//! don't need them. Most secondary optimizations are centered around removing
+//! allocations and such, but avoiding parsing JSON is the #1 optimization.
+//!
+//! When we get queries from the resolver we're given a `Dependency`. This
+//! dependency in turn has a version requirement, and with lock files that
+//! already exist these version requirements are exact version requirements
+//! `=a.b.c`. This means that we in theory only need to parse one line of JSON
+//! per query in the registry, the one that matches version `a.b.c`.
+//!
+//! The crates.io index, however, is not amenable to this form of query. Instead
+//! the crates.io index simply is a file where each line is a JSON blob. To
+//! learn about the versions in each JSON blob we would need to parse the JSON,
+//! defeating the purpose of trying to parse as little as possible.
+//!
+//! > Note that as a small aside even *loading* the JSON from the registry is
+//! > actually pretty slow. For crates.io and remote registries we don't
+//! > actually check out the git index on disk because that takes quite some
+//! > time and is quite large. Instead we use `libgit2` to read the JSON from
+//! > the raw git objects. This in turn can be slow (aka show up high in
+//! > profiles) because libgit2 has to do deflate decompression and such.
+//!
+//! To solve all these issues a strategy is employed here where Cargo basically
+//! creates an index into the index. The first time a package is queried about
+//! (first time being for an entire computer) Cargo will load the contents
+//! (slowly via libgit2) from the registry. It will then (slowly) parse every
+//! single line to learn about its versions. Afterwards, however, Cargo will
+//! emit a new file (a cache) which is amenable for speedily parsing in future
+//! invocations.
+//!
+//! This cache file is currently organized by basically having the semver
+//! version extracted from each JSON blob. That way Cargo can quickly and easily
+//! parse all versions contained and which JSON blob they're associated with.
+//! The JSON blob then doesn't actually need to get parsed unless the version is
+//! parsed.
+//!
+//! Altogether the initial measurements of this show a massive improvement for
+//! Cargo null build performance. It's expected that the improvements earned
+//! here will continue to grow over time in the sense that the previous
+//! implementation (parse all lines each time) actually continues to slow down
+//! over time as new versions of a crate are published. In any case when first
+//! implemented a null build of Cargo itself would parse 3700 JSON blobs from
+//! the registry and load 150 blobs from git. Afterwards it parses 150 JSON
+//! blobs and loads 0 files from git. Removing 200ms or more from Cargo's
+//! startup time is certainly nothing to sneeze at!
+//!
+//! Note that this is just a high-level overview, there's of course lots of
+//! details like invalidating caches and whatnot which are handled below, but
+//! hopefully those are more obvious inline in the code itself.
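The core trick in the module doc above can be shown in a few lines. A standalone sketch (hypothetical `matching_blobs` function, not part of the diff): given `(Version, raw JSON line)` pairs recovered from the cache, only the lines whose version matches the (often exact, from `Cargo.lock`) requirement ever need JSON deserialization.

```rust
use semver::{Version, VersionReq};

fn matching_blobs<'a>(
    cached: &'a [(Version, &'a [u8])],
    req: &'a VersionReq,
) -> impl Iterator<Item = &'a [u8]> + 'a {
    cached
        .iter()
        .filter(move |(v, _)| req.matches(v)) // cheap semver check, no JSON parsing
        .map(|(_, blob)| *blob)               // parse only these few blobs when needed
}
```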
+
+use std::collections::{HashMap, HashSet};
+use std::fs;
+use std::path::Path;
+use std::str;
+
+use log::info;
+use semver::{Version, VersionReq};
+
+use crate::core::dependency::Dependency;
+use crate::core::{InternedString, PackageId, SourceId, Summary};
+use crate::sources::registry::{RegistryData, RegistryPackage};
+use crate::util::{internal, CargoResult, Config, Filesystem, ToSemver};
+
+/// Crates.io treats hyphen and underscores as interchangeable, but the index and old Cargo do not.
+/// Therefore, the index must store the uncanonicalized version of the name so old Cargos can find it.
+/// This loop tries all possible combinations of switching hyphen and underscores to find the
+/// uncanonicalized one. As all stored inputs have the correct spelling, we start with the spelling
+/// as-provided.
+struct UncanonicalizedIter<'s> {
+    input: &'s str,
+    num_hyphen_underscore: u32,
+    hyphen_combination_num: u16,
+}
+
+impl<'s> UncanonicalizedIter<'s> {
+    fn new(input: &'s str) -> Self {
+        let num_hyphen_underscore = input.chars().filter(|&c| c == '_' || c == '-').count() as u32;
+        UncanonicalizedIter {
+            input,
+            num_hyphen_underscore,
+            hyphen_combination_num: 0,
+        }
+    }
+}
+
+impl<'s> Iterator for UncanonicalizedIter<'s> {
+    type Item = String;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.hyphen_combination_num > 0
+            && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore
+        {
+            return None;
+        }
+
+        let ret = Some(
+            self.input
+                .chars()
+                .scan(0u16, |s, c| {
+                    // the check against 15 here is to prevent
+                    // shift overflow on inputs with more than 15 hyphens
+                    if (c == '_' || c == '-') && *s <= 15 {
+                        let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0;
+                        let out = if (c == '_') ^ switch { '_' } else { '-' };
+                        *s += 1;
+                        Some(out)
+                    } else {
+                        Some(c)
+                    }
+                })
+                .collect(),
+        );
+        self.hyphen_combination_num += 1;
+        ret
+    }
+}
+
+#[test]
+fn no_hyphen() {
+    assert_eq!(
+        UncanonicalizedIter::new("test").collect::<Vec<_>>(),
+        vec!["test".to_string()]
+    )
+}
+
+#[test]
+fn two_hyphen() {
+    assert_eq!(
+        UncanonicalizedIter::new("te-_st").collect::<Vec<_>>(),
+        vec![
+            "te-_st".to_string(),
+            "te__st".to_string(),
+            "te--st".to_string(),
+            "te_-st".to_string()
+        ]
+    )
+}
+
+#[test]
+fn overflow_hyphen() {
+    assert_eq!(
+        UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st")
+            .take(100)
+            .count(),
+        100
+    )
+}
+
+pub struct RegistryIndex<'cfg> {
+    source_id: SourceId,
+    path: Filesystem,
+    summaries_cache: HashMap<InternedString, Summaries>,
+    config: &'cfg Config,
+}
+/// An internal cache of summaries for a particular package.
+///
+/// A list of summaries are loaded from disk via one of two methods:
+///
+/// 1. Primarily Cargo will parse the corresponding file for a crate in the
+///    upstream crates.io registry. That's just a JSON blob per line which we
+///    can parse, extract the version, and then store here.
+///
+/// 2. Alternatively, if Cargo has previously run, we'll have a cached index of
+///    dependencies for the upstream index. This is a file that Cargo maintains
+///    lazily on the local filesystem and is much faster to parse since it
+///    doesn't involve parsing all of the JSON.
+///
+/// The outward-facing interface of this doesn't matter too much where it's
+/// loaded from, but it's important when reading the implementation to note that
+/// we try to parse as little as possible!
+#[derive(Default)]
+struct Summaries {
+    /// A raw vector of uninterpreted bytes. This is what `Unparsed` start/end
+    /// fields are indexes into. If a `Summaries` is loaded from the crates.io
+    /// index then this field will be empty since nothing is `Unparsed`.
+    raw_data: Vec<u8>,
+
+    /// All known versions of a crate, keyed from their `Version` to the
+    /// possibly parsed or unparsed version of the full summary.
+    versions: HashMap<Version, MaybeIndexSummary>,
+}
+
+/// A lazily parsed `IndexSummary`.
+enum MaybeIndexSummary {
+    /// A summary which has not been parsed; the `start` and `end` are pointers
+    /// into `Summaries::raw_data` which this is an entry of.
+    Unparsed { start: usize, end: usize },
+
+    /// An actually parsed summary.
+    Parsed(IndexSummary),
+}
+
+/// A parsed representation of a summary from the index.
+///
+/// In addition to a full `Summary` we have a few auxiliary pieces of
+/// information like `yanked` and what the checksum hash is.
+pub struct IndexSummary {
+    pub summary: Summary,
+    pub yanked: bool,
+    pub hash: String,
+}
+
+/// A representation of the cache on disk that Cargo maintains of summaries.
+/// Cargo will initially parse all summaries in the registry and will then
+/// serialize that into this form and place it in a new location on disk,
+/// ensuring that access in the future is much speedier.
+#[derive(Default)]
+struct SummariesCache<'a> {
+    versions: Vec<(Version, &'a [u8])>,
+}
+
+impl<'cfg> RegistryIndex<'cfg> {
+    pub fn new(
+        source_id: SourceId,
+        path: &Filesystem,
+        config: &'cfg Config,
+    ) -> RegistryIndex<'cfg> {
+        RegistryIndex {
+            source_id,
+            path: path.clone(),
+            summaries_cache: HashMap::new(),
+            config,
+        }
+    }
+
+    /// Returns the hash listed for a specified `PackageId`.
+    pub fn hash(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> CargoResult<String> {
+        let req = VersionReq::exact(pkg.version());
+        let summary = self
+            .summaries(pkg.name(), &req, load)?
+            .next()
+            .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?;
+        Ok(summary.hash.clone())
+    }
+
+    /// Load a list of summaries for `name` package in this registry which
+    /// match `req`.
+    ///
+    /// This function will semantically parse the on-disk index, match all
+    /// versions, and then return an iterator over all summaries which matched.
+    /// Internally there's quite a few layers of caching to amortize this cost
+    /// though since this method is called quite a lot on null builds in Cargo.
+    pub fn summaries<'a, 'b>(
+        &'a mut self,
+        name: InternedString,
+        req: &'b VersionReq,
+        load: &mut dyn RegistryData,
+    ) -> CargoResult<impl Iterator<Item = &'a IndexSummary> + 'b>
+    where
+        'a: 'b,
+    {
+        let source_id = self.source_id;
+
+        // First up actually parse what summaries we have available. If Cargo
+        // has run previously this will parse a Cargo-specific cache file rather
+        // than the registry itself. In effect this is intended to be a quite
+        // cheap operation.
+        let summaries = self.load_summaries(name, load)?;
+
+        // Iterate over our summaries, extract all relevant ones which match our
+        // version requirement, and then parse all corresponding rows in the
+        // registry. As a reminder this `summaries` method is called for each
+        // entry in a lock file on every build, so we want to absolutely
+        // minimize the amount of work being done here and parse as little as
+        // necessary.
+        let raw_data = &summaries.raw_data;
+        Ok(summaries
+            .versions
+            .iter_mut()
+            .filter_map(move |(k, v)| if req.matches(k) { Some(v) } else { None })
+            .filter_map(move |maybe| match maybe.parse(raw_data, source_id) {
+                Ok(summary) => Some(summary),
+                Err(e) => {
+                    info!("failed to parse `{}` registry package: {}", name, e);
+                    None
+                }
+            }))
+    }
+
+    fn load_summaries(
+        &mut self,
+        name: InternedString,
+        load: &mut dyn RegistryData,
+    ) -> CargoResult<&mut Summaries> {
+        // If we've previously loaded what versions are present for `name`, just
+        // return that since our cache should still be valid.
+        if self.summaries_cache.contains_key(&name) {
+            return Ok(self.summaries_cache.get_mut(&name).unwrap());
+        }
+
+        // Prepare the `RegistryData` which will lazily initialize internal data
+        // structures.
+        load.prepare()?;
+
+        // let root = self.config.assert_package_cache_locked(&self.path);
+        let root = load.assert_index_locked(&self.path);
+        let cache_root = root.join(".cache");
+        let index_version = load.current_version();
+
+        // See module comment in `registry/mod.rs` for why this is structured
+        // the way it is.
+        let fs_name = name
+            .chars()
+            .flat_map(|c| c.to_lowercase())
+            .collect::<String>();
+        let raw_path = match fs_name.len() {
+            1 => format!("1/{}", fs_name),
+            2 => format!("2/{}", fs_name),
+            3 => format!("3/{}/{}", &fs_name[..1], fs_name),
+            _ => format!("{}/{}/{}", &fs_name[0..2], &fs_name[2..4], fs_name),
+        };
+
+        // Attempt to handle misspellings by searching for a chain of related
+        // names to the original `raw_path` name. Only return summaries
+        // associated with the first hit, however. The resolver will later
+        // reject any candidates that have the wrong name, and along the way
+        // this will produce helpful "did you mean?" suggestions.
+        for path in UncanonicalizedIter::new(&raw_path).take(1024) {
+            let summaries = Summaries::parse(
+                index_version.as_ref().map(|s| &**s),
+                root,
+                &cache_root,
+                path.as_ref(),
+                self.source_id,
+                load,
+                self.config,
+            )?;
+            if let Some(summaries) = summaries {
+                self.summaries_cache.insert(name, summaries);
+                return Ok(self.summaries_cache.get_mut(&name).unwrap());
+            }
+        }
+
+        // If nothing was found then this crate doesn't exist, so just use an
+        // empty `Summaries` list.
+        self.summaries_cache.insert(name, Summaries::default());
+        Ok(self.summaries_cache.get_mut(&name).unwrap())
+    }
+    pub fn query_inner(
+        &mut self,
+        dep: &Dependency,
+        load: &mut dyn RegistryData,
+        yanked_whitelist: &HashSet<PackageId>,
+        f: &mut dyn FnMut(Summary),
+    ) -> CargoResult<()> {
+        if self.config.offline()
+            && self.query_inner_with_online(dep, load, yanked_whitelist, f, false)? != 0
+        {
+            return Ok(());
+            // If offline, and there are no matches, try again with online.
+            // This is necessary for dependencies that are not used (such as
+            // target-cfg or optional), but are not downloaded. Normally the
+            // build should succeed if they are not downloaded and not used,
+            // but they still need to resolve. If they are actually needed
+            // then cargo will fail to download and an error message
+            // indicating that the required dependency is unavailable while
+            // offline will be displayed.
+        }
+        self.query_inner_with_online(dep, load, yanked_whitelist, f, true)?;
+        Ok(())
+    }
+
+    fn query_inner_with_online(
+        &mut self,
+        dep: &Dependency,
+        load: &mut dyn RegistryData,
+        yanked_whitelist: &HashSet<PackageId>,
+        f: &mut dyn FnMut(Summary),
+        online: bool,
+    ) -> CargoResult<usize> {
+        let source_id = self.source_id;
+        let summaries = self
+            .summaries(dep.package_name(), dep.version_req(), load)?
+            // First filter summaries for `--offline`. If we're online then
+            // everything is a candidate, otherwise if we're offline we're only
+            // going to consider candidates which are actually present on disk.
+            //
+            // Note: This particular logic can cause problems with
+            // optional dependencies when offline. If at least 1 version
+            // of an optional dependency is downloaded, but that version
+            // does not satisfy the requirements, then resolution will
+            // fail. Unfortunately, whether or not something is optional
+            // is not known here.
+            .filter(|s| (online || load.is_crate_downloaded(s.summary.package_id())))
+            // Next filter out all yanked packages. Some yanked packages may
+            // leak through if they're in a whitelist (aka if they were
+            // previously in `Cargo.lock`)
+            .filter(|s| !s.yanked || yanked_whitelist.contains(&s.summary.package_id()))
+            .map(|s| s.summary.clone());
+
+        // Handle `cargo update --precise` here. If specified, our own source
+        // will have a precise version listed of the form
+        // `<name>=<installed>-><requested>` where `<name>` is the name of a
+        // crate on this source, `<installed>` is the version installed and
+        // `<requested>` is the version requested (argument to `--precise`).
+        let name = dep.package_name().as_str();
+        let summaries = summaries.filter(|s| match source_id.precise() {
+            Some(p) if p.starts_with(name) && p[name.len()..].starts_with('=') => {
+                let mut vers = p[name.len() + 1..].splitn(2, "->");
+                if dep
+                    .version_req()
+                    .matches(&vers.next().unwrap().to_semver().unwrap())
+                {
+                    vers.next().unwrap() == s.version().to_string()
+                } else {
+                    true
+                }
+            }
+            _ => true,
+        });
+
+        let mut count = 0;
+        for summary in summaries {
+            f(summary);
+            count += 1;
+        }
+        Ok(count)
+    }
+
+    pub fn is_yanked(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> CargoResult<bool> {
+        let req = VersionReq::exact(pkg.version());
+        let found = self
+            .summaries(pkg.name(), &req, load)?
+            .any(|summary| summary.yanked);
+        Ok(found)
+    }
+}
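A worked example of the `--precise` encoding handled above, assuming the source's precise string after something like `cargo update -p serde --precise 1.0.100` takes the `<name>=<installed>-><requested>` shape (the crate name and versions here are illustrative):

```rust
#[test]
fn precise_encoding() {
    let p = "serde=1.0.0->1.0.100";
    let name = "serde";
    assert!(p.starts_with(name) && p[name.len()..].starts_with('='));
    let mut vers = p[name.len() + 1..].splitn(2, "->");
    assert_eq!(vers.next(), Some("1.0.0"));   // version currently locked
    assert_eq!(vers.next(), Some("1.0.100")); // version requested
}
```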
+impl Summaries {
+ /// Parse out a `Summaries` instance from on-disk state.
+ ///
+ /// This will attempt to prefer parsing a previous cache file that already
+ /// exists from a previous invocation of Cargo (aka you're typing `cargo
+ /// build` again after typing it previously). If parsing fails or the cache
+ /// isn't found, then we take a slower path which loads the full descriptor
+ /// for `relative` from the underlying index (aka typically libgit2 with
+ /// crates.io) and then parse everything in there.
+ ///
+ /// * `index_version` - a version string to describe the current state of
+ /// the index which for remote registries is the current git sha and
+ /// for local registries is not available.
+ /// * `root` - this is the root argument passed to `load`
+ /// * `cache_root` - this is the root on the filesystem itself of where to
+ /// store cache files.
+ /// * `relative` - this is the file we're loading from cache or the index
+ /// data
+ /// * `source_id` - the registry's SourceId used when parsing JSON blobs to
+ /// create summaries.
+ /// * `load` - the actual index implementation which may be very slow to
+ /// call. We avoid this if we can.
+ pub fn parse(
+ index_version: Option<&str>,
+ root: &Path,
+ cache_root: &Path,
+ relative: &Path,
+ source_id: SourceId,
+ load: &mut dyn RegistryData,
+ config: &Config,
+ ) -> CargoResult<Option<Summaries>> {
+ // First up, attempt to load the cache. This could fail for all manner
+ // of reasons, but consider all of them non-fatal and just log their
+ // occurrence in case anyone is debugging anything.
+ let cache_path = cache_root.join(relative);
+ let mut cache_contents = None;
+ if let Some(index_version) = index_version {
+ match fs::read(&cache_path) {
+ Ok(contents) => match Summaries::parse_cache(contents, index_version) {
+ Ok(s) => {
+ log::debug!("fast path for registry cache of {:?}", relative);
+ if cfg!(debug_assertions) {
+ cache_contents = Some(s.raw_data);
+ } else {
+ return Ok(Some(s));
+ }
+ }
+ Err(e) => {
+ log::debug!("failed to parse {:?} cache: {}", relative, e);
+ }
+ },
+ Err(e) => log::debug!("cache missing for {:?} error: {}", relative, e),
+ }
+ }
+
+ // This is the fallback path where we actually talk to libgit2 to load
+ // information. Here we parse every single line in the index (as we need
+ // to find the versions).
+ log::debug!("slow path for {:?}", relative);
+ let mut ret = Summaries::default();
+ let mut hit_closure = false;
+ let mut cache_bytes = None;
+ let err = load.load(root, relative, &mut |contents| {
+ ret.raw_data = contents.to_vec();
+ let mut cache = SummariesCache::default();
+ hit_closure = true;
+ for line in split(contents, b'\n') {
+ // Attempt forwards-compatibility on the index by ignoring
+ // everything that we ourselves don't understand. That should
+ // allow future cargo implementations to change the
+ // interpretation of each line here, while older cargo will
+ // simply ignore the new lines.
+ let summary = match IndexSummary::parse(line, source_id) {
+ Ok(summary) => summary,
+ Err(e) => {
+ log::info!("failed to parse {:?} registry package: {}", relative, e);
+ continue;
+ }
+ };
+ let version = summary.summary.package_id().version().clone();
+ cache.versions.push((version.clone(), line));
+ ret.versions.insert(version, summary.into());
+ }
+ if let Some(index_version) = index_version {
+ cache_bytes = Some(cache.serialize(index_version));
+ }
+ Ok(())
+ });
+
+ // We ignore lookup failures as those are just crates which don't exist
+ // or we haven't updated the registry yet. If we actually ran the
+ // closure though then we care about those errors.
+ if !hit_closure {
+ debug_assert!(cache_contents.is_none());
+ return Ok(None);
+ }
+ err?;
+
+ // If we've got debug assertions enabled and the cache was previously
+ // present and considered fresh, this is where the debug assertions
+ // actually happen: verify that our cache is indeed fresh and computes
+ // exactly the same value as before.
+ if cfg!(debug_assertions) && cache_contents.is_some() {
+ assert_eq!(cache_bytes, cache_contents);
+ }
+
+ // Once we have our `cache_bytes` which represents the `Summaries` we're
+ // about to return, write that back out to disk so future Cargo
+ // invocations can use it.
+ //
+ // This is opportunistic so we ignore failure here but are sure to log
+ // something in case of error.
+ if let Some(cache_bytes) = cache_bytes {
+ if fs::create_dir_all(cache_path.parent().unwrap()).is_ok() {
+ let path = Filesystem::new(cache_path.clone());
+ config.assert_package_cache_locked(&path);
+ if let Err(e) = fs::write(cache_path, cache_bytes) {
+ log::info!("failed to write cache: {}", e);
+ }
+ }
+ }
+
+ Ok(Some(ret))
+ }
+
+ /// Parses the contents of a file which was previously cached by Cargo.
+ pub fn parse_cache(contents: Vec<u8>, last_index_update: &str) -> CargoResult<Summaries> {
+ let cache = SummariesCache::parse(&contents, last_index_update)?;
+ let mut ret = Summaries::default();
+ for (version, summary) in cache.versions {
+ let (start, end) = subslice_bounds(&contents, summary);
+ ret.versions
+ .insert(version, MaybeIndexSummary::Unparsed { start, end });
+ }
+ ret.raw_data = contents;
+ return Ok(ret);
+
+ // Returns the start/end offsets of `inner` within `outer`. Asserts that
+ // `inner` is a subslice of `outer`.
+ fn subslice_bounds(outer: &[u8], inner: &[u8]) -> (usize, usize) {
+ let outer_start = outer.as_ptr() as usize;
+ let outer_end = outer_start + outer.len();
+ let inner_start = inner.as_ptr() as usize;
+ let inner_end = inner_start + inner.len();
+ assert!(inner_start >= outer_start);
+ assert!(inner_end <= outer_end);
+ (inner_start - outer_start, inner_end - outer_start)
+ }
+ }
+}
+
+// Implementation of serializing/deserializing the cache of summaries on disk.
+// Currently the format looks like:
+//
+// +--------------+-------------+---+
+// | version byte | git sha rev | 0 |
+// +--------------+-------------+---+
+//
+// followed by...
+//
+// +----------------+---+------------+---+
+// | semver version | 0 | JSON blob | 0 | ...
+// +----------------+---+------------+---+
+//
+// The idea is that this is a very easy file for Cargo to parse in future
+// invocations. The read from disk should be quite fast and then afterwards all
+// we need to know is what versions correspond to which JSON blob.
+//
+// The leading version byte is intended to ensure that there's some level of
+// future compatibility against changes to this cache format so if different
+// versions of Cargo share the same cache they don't get too confused. The git
+// sha lets us know when the file needs to be regenerated (it needs regeneration
+// whenever the index itself updates).
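+
+// A hand-rolled illustration (hypothetical test) of the layout described
+// above: one version byte, the index's git sha, then NUL-separated
+// semver/JSON pairs.
+#[test]
+fn cache_layout_illustration() {
+ let mut buf = vec![CURRENT_CACHE_VERSION];
+ buf.extend_from_slice(b"deadbeef"); // stand-in for the index's git sha
+ buf.push(0);
+ buf.extend_from_slice(b"1.0.0");
+ buf.push(0);
+ buf.extend_from_slice(br#"{"name":"a","vers":"1.0.0"}"#);
+ buf.push(0);
+ // One segment for the header, one for the version, one for the JSON
+ // blob, plus the empty tail after the final NUL terminator.
+ assert_eq!(buf.split(|b| *b == 0).count(), 4);
+}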
+
+const CURRENT_CACHE_VERSION: u8 = 1;
+
+impl<'a> SummariesCache<'a> {
+ fn parse(data: &'a [u8], last_index_update: &str) -> CargoResult<SummariesCache<'a>> {
+ // NB: keep this method in sync with `serialize` below
+ let (first_byte, rest) = data
+ .split_first()
+ .ok_or_else(|| failure::format_err!("malformed cache"))?;
+ if *first_byte != CURRENT_CACHE_VERSION {
+ failure::bail!("looks like a different Cargo's cache, bailing out");
+ }
+ let mut iter = split(rest, 0);
+ if let Some(update) = iter.next() {
+ if update != last_index_update.as_bytes() {
+ failure::bail!(
+ "cache out of date: current index ({}) != cache ({})",
+ last_index_update,
+ str::from_utf8(update)?,
+ )
+ }
+ } else {
+ failure::bail!("malformed file");
+ }
+ let mut ret = SummariesCache::default();
+ while let Some(version) = iter.next() {
+ let version = str::from_utf8(version)?;
+ let version = Version::parse(version)?;
+ let summary = iter.next().unwrap();
+ ret.versions.push((version, summary));
+ }
+ Ok(ret)
+ }
+
+ fn serialize(&self, index_version: &str) -> Vec<u8> {
+ // NB: keep this method in sync with `parse` above
+ let size = self
+ .versions
+ .iter()
+ .map(|(_version, data)| (10 + data.len()))
+ .sum();
+ let mut contents = Vec::with_capacity(size);
+ contents.push(CURRENT_CACHE_VERSION);
+ contents.extend_from_slice(index_version.as_bytes());
+ contents.push(0);
+ for (version, data) in self.versions.iter() {
+ contents.extend_from_slice(version.to_string().as_bytes());
+ contents.push(0);
+ contents.extend_from_slice(data);
+ contents.push(0);
+ }
+ contents
+ }
+}
+
+impl MaybeIndexSummary {
+ /// Parses this "maybe a summary" into a for-sure `Parsed` variant.
+ ///
+ /// Does nothing if this is already `Parsed`, and otherwise the `raw_data`
+ /// passed in is sliced with the bounds in `Unparsed` and then actually
+ /// parsed.
+ fn parse(&mut self, raw_data: &[u8], source_id: SourceId) -> CargoResult<&IndexSummary> {
+ let (start, end) = match self {
+ MaybeIndexSummary::Unparsed { start, end } => (*start, *end),
+ MaybeIndexSummary::Parsed(summary) => return Ok(summary),
+ };
+ let summary = IndexSummary::parse(&raw_data[start..end], source_id)?;
+ *self = MaybeIndexSummary::Parsed(summary);
+ match self {
+ MaybeIndexSummary::Unparsed { .. } => unreachable!(),
+ MaybeIndexSummary::Parsed(summary) => Ok(summary),
+ }
+ }
+}
+
+impl From<IndexSummary> for MaybeIndexSummary {
+ fn from(summary: IndexSummary) -> MaybeIndexSummary {
+ MaybeIndexSummary::Parsed(summary)
+ }
+}
+
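+// For reference, a single index line of the sort `IndexSummary::parse`
+// below consumes looks roughly like this (made-up, abbreviated values):
+//
+// {"name":"foo","vers":"0.1.0","deps":[],"features":{},"cksum":"...","yanked":false}
+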
+impl IndexSummary {
+ /// Parses a line from the registry's index file into an `IndexSummary` for
+ /// a package.
+ ///
+ /// The `line` provided is expected to be valid JSON.
+ fn parse(line: &[u8], source_id: SourceId) -> CargoResult<IndexSummary> {
+ let RegistryPackage {
+ name,
+ vers,
+ cksum,
+ deps,
+ features,
+ yanked,
+ links,
+ } = serde_json::from_slice(line)?;
+ log::trace!("json parsed registry {}/{}", name, vers);
+ let pkgid = PackageId::new(&name, &vers, source_id)?;
+ let deps = deps
+ .into_iter()
+ .map(|dep| dep.into_dep(source_id))
+ .collect::<CargoResult<Vec<_>>>()?;
+ let ftrs = features
+ .iter()
+ .map(|(k, v)| (k.clone(), (None, v.clone())))
+ .collect();
+ let mut summary = Summary::new(pkgid, deps, &ftrs, links, false)?;
+ summary.set_checksum(cksum.clone());
+ Ok(IndexSummary {
+ summary,
+ yanked: yanked.unwrap_or(false),
+ hash: cksum,
+ })
+ }
+}
+
+fn split<'a>(haystack: &'a [u8], needle: u8) -> impl Iterator<Item = &'a [u8]> + 'a {
+ struct Split<'a> {
+ haystack: &'a [u8],
+ needle: u8,
+ }
+
+ impl<'a> Iterator for Split<'a> {
+ type Item = &'a [u8];
+
+ fn next(&mut self) -> Option<&'a [u8]> {
+ if self.haystack.is_empty() {
+ return None;
+ }
+ let (ret, remaining) = match memchr::memchr(self.needle, self.haystack) {
+ Some(pos) => (&self.haystack[..pos], &self.haystack[pos + 1..]),
+ None => (self.haystack, &[][..]),
+ };
+ self.haystack = remaining;
+ Some(ret)
+ }
+ }
+
+ Split { haystack, needle }
+}
diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs
new file mode 100644
index 00000000000..35c8db48361
--- /dev/null
+++ b/src/cargo/sources/registry/local.rs
@@ -0,0 +1,119 @@
+use crate::core::{InternedString, PackageId};
+use crate::sources::registry::{MaybeLock, RegistryConfig, RegistryData};
+use crate::util::errors::CargoResult;
+use crate::util::paths;
+use crate::util::{Config, Filesystem, Sha256};
+use std::fs::File;
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::path::Path;
+
+pub struct LocalRegistry<'cfg> {
+ index_path: Filesystem,
+ root: Filesystem,
+ src_path: Filesystem,
+ config: &'cfg Config,
+}
+
+impl<'cfg> LocalRegistry<'cfg> {
+ pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> {
+ LocalRegistry {
+ src_path: config.registry_source_path().join(name),
+ index_path: Filesystem::new(root.join("index")),
+ root: Filesystem::new(root.to_path_buf()),
+ config,
+ }
+ }
+}
+
+impl<'cfg> RegistryData for LocalRegistry<'cfg> {
+ fn prepare(&self) -> CargoResult<()> {
+ Ok(())
+ }
+
+ fn index_path(&self) -> &Filesystem {
+ &self.index_path
+ }
+
+ fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path {
+ // Note that the `*_unlocked` variant is used here since we're not
+ // modifying the index and it's required to be externally synchronized.
+ path.as_path_unlocked()
+ }
+
+ fn current_version(&self) -> Option<InternedString> {
+ None
+ }
+
+ fn load(
+ &self,
+ root: &Path,
+ path: &Path,
+ data: &mut dyn FnMut(&[u8]) -> CargoResult<()>,
+ ) -> CargoResult<()> {
+ data(&paths::read_bytes(&root.join(path))?)
+ }
+
+ fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+ // Local registries don't have configuration for remote APIs or anything
+ // like that
+ Ok(None)
+ }
+
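+ // For reference, a local registry directory is expected to look like
+ // this (hypothetical layout; the index uses the same format as a remote
+ // registry's git index):
+ //
+ // my-registry/
+ // ├── index/
+ // └── foo-0.1.0.crate
+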
+ fn update_index(&mut self) -> CargoResult<()> {
+ // Nothing to update, we just use what's on disk. Verify it actually
+ // exists though. We don't use any locks as we're just checking whether
+ // these directories exist.
+ let root = self.root.clone().into_path_unlocked();
+ if !root.is_dir() {
+ failure::bail!("local registry path is not a directory: {}", root.display())
+ }
+ let index_path = self.index_path.clone().into_path_unlocked();
+ if !index_path.is_dir() {
+ failure::bail!(
+ "local registry index path is not a directory: {}",
+ index_path.display()
+ )
+ }
+ Ok(())
+ }
+
+ fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock> {
+ let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version());
+
+ // Note that the usage of `into_path_unlocked` here is because the local
+ // crate files here never change in that we're not the one writing them,
+ // so it's not our responsibility to synchronize access to them.
+ let path = self.root.join(&crate_file).into_path_unlocked();
+ let mut crate_file = File::open(&path)?;
+
+ // If we've already got an unpacked version of this crate, then skip the
+ // checksum below as it is in theory already verified.
+ let dst = format!("{}-{}", pkg.name(), pkg.version());
+ if self.src_path.join(dst).into_path_unlocked().exists() {
+ return Ok(MaybeLock::Ready(crate_file));
+ }
+
+ self.config.shell().status("Unpacking", pkg)?;
+
+ // We don't actually need to download anything per se, we just need to
+ // verify the checksum matches the .crate file itself.
+ let actual = Sha256::new().update_file(&crate_file)?.finish_hex();
+ if actual != checksum {
+ failure::bail!("failed to verify the checksum of `{}`", pkg)
+ }
+
+ crate_file.seek(SeekFrom::Start(0))?;
+
+ Ok(MaybeLock::Ready(crate_file))
+ }
+
+ fn finish_download(
+ &mut self,
+ _pkg: PackageId,
+ _checksum: &str,
+ _data: &[u8],
+ ) -> CargoResult<File> {
+ panic!("this source doesn't download")
+ }
+}
diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs
new file mode 100644
index 00000000000..a63d15e0103
--- /dev/null
+++ b/src/cargo/sources/registry/mod.rs
@@ -0,0 +1,638 @@
+//! A `Source` for registry-based packages.
+//!
+//! # What's a Registry?
+//!
+//! Registries are central locations where packages can be uploaded to,
+//! discovered, and searched for. The purpose of a registry is to have a
+//! location that serves as permanent storage for versions of a crate over time.
+//!
+//! Compared to git sources, a registry provides many packages as well as many
+//! versions simultaneously. Git sources can also have commits deleted through
+//! rebases, where registries cannot have their versions deleted.
+//!
+//! # The Index of a Registry
+//!
+//! One of the major difficulties with a registry is that hosting so many
+//! packages may quickly run into performance problems when dealing with
+//! dependency graphs. It's infeasible for cargo to download the entire contents
+//! of the registry just to resolve one package's dependencies, for example. As
+//! a result, cargo needs some efficient method of querying what packages are
+//! available on a registry, what versions are available, and what the
+//! dependencies for each version are.
+//!
+//! One method of doing so would be having the registry expose an HTTP endpoint
+//! which can be queried with a list of packages and a response of their
+//! dependencies and versions is returned. This is somewhat inefficient,
+//! however, as we may have to hit the endpoint many times and we may have
+//! already queried for much of the data locally (for other packages, for
+//! example). This also involves inventing a transport format between the
+//! registry and Cargo itself, so this route was not taken.
+//!
+//! Instead, Cargo communicates with registries through a git repository
+//! referred to as the Index. The Index of a registry is essentially an easily
+//! query-able version of the registry's database for a list of versions of a
+//! package as well as a list of dependencies for each version.
+//!
+//! Using git to host this index provides a number of benefits:
+//!
+//! * The entire index can be stored efficiently locally on disk. This means
+//! that all queries of a registry can happen locally and don't need to touch
+//! the network.
+//!
+//! * Updates of the index are quite efficient. Using git buys incremental
+//! updates, compressed transmission, etc for free. The index must be updated
+//! each time we need fresh information from a registry, but this is one
+//! update of a git repository that probably hasn't changed a whole lot so
+//! it shouldn't be too expensive.
+//!
+//! Additionally, each modification to the index is just appending a line at
+//! the end of a file (the exact format is described later). This means that
+//! the commits for an index are quite small and easily applied/compressible.
+//!
+//! ## The format of the Index
+//!
+//! The index is a store for the list of versions for all packages known, so its
+//! format on disk is optimized slightly to ensure that `ls registry` doesn't
+//! produce a list of all packages ever known. The index also wants to ensure
+//! that there aren't a million files, which may actually end up hitting
+//! filesystem limits at some point. To this end, a few decisions were made
+//! about the format of the registry:
+//!
+//! 1. Each crate will have one file corresponding to it. Each version for a
+//! crate will just be a line in this file.
+//! 2. There will be two tiers of directories for crate names, under which
+//! crates corresponding to those tiers will be located.
+//!
+//! As an example, here is a hierarchy of an index:
+//!
+//! ```notrust
+//! .
+//! ├── 3
+//! │   └── u
+//! │       └── url
+//! ├── bz
+//! │   └── ip
+//! │       └── bzip2
+//! ├── config.json
+//! ├── en
+//! │   └── co
+//! │       └── encoding
+//! └── li
+//!     ├── bg
+//!     │   └── libgit2
+//!     └── nk
+//!         └── link-config
+//! ```
+//!
+//! The root of the index contains a `config.json` file with a few entries
+//! corresponding to the registry (see `RegistryConfig` below).
+//!
+//! Otherwise, there are three numbered directories (1, 2, 3) for crates with
+//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the
+//! crate files underneath them, while the 3 directory is sharded by the first
+//! letter of the crate name.
+//!
+//! Otherwise the top-level directory contains many two-letter directory names,
+//! each of which has many sub-folders with two letters. At the end of all these
+//! are the actual crate files themselves.
+//!
+//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as
+//! efficient lookup based on the crate name itself.
+//!
+//! ## Crate files
+//!
+//! Each file in the index is the history of one crate over time. Each line in
+//! the file corresponds to one version of a crate, stored in JSON format (see
+//! the `RegistryPackage` structure below).
+//!
+//! As new versions are published, new lines are appended to this file. The only
+//! modifications to this file that should happen over time are yanks of a
+//! particular version.
+//!
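+//! For reference, each line in a crate file is a JSON blob along these
+//! lines (made-up, abbreviated values):
+//!
+//! ```notrust
+//! {"name":"url","vers":"0.1.0","deps":[],"features":{},"cksum":"...","yanked":false}
+//! ```
+//!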
+//! # Downloading Packages
+//!
+//! The purpose of the Index was to provide an efficient method to resolve the
+//! dependency graph for a package. So far we only required one network
+//! interaction to update the registry's repository (yay!). After resolution has
+//! been performed, however, we need to download the contents of packages so we
+//! can read the full manifest and build the source code.
+//!
+//! To accomplish this, this source's `download` method will make an HTTP
+//! request per-package requested to download tarballs into a local cache. These
+//! tarballs will then be unpacked into a destination folder.
+//!
+//! Note that because versions uploaded to the registry are frozen forever, the
+//! HTTP download and unpacking can all be skipped if the version has already
+//! been downloaded and unpacked. This caching allows us to only download a
+//! package when absolutely necessary.
+//!
+//! # Filesystem Hierarchy
+//!
+//! Overall, the `$HOME/.cargo` directory looks like this when talking about
+//! the registry:
+//!
+//! ```notrust
+//! # A folder under which all registry metadata is hosted (similar to
+//! # $HOME/.cargo/git)
+//! $HOME/.cargo/registry/
+//!
+//!     # For each registry that cargo knows about (keyed by hostname + hash)
+//!     # there is a folder which is the checked out version of the index for
+//!     # the registry in this location. Note that this is done so cargo can
+//!     # support multiple registries simultaneously
+//!     index/
+//!         registry1-<hash>/
+//!         registry2-<hash>/
+//!         ...
+//!
+//!     # This folder is a cache for all downloaded tarballs from a registry.
+//!     # Once downloaded and verified, a tarball never changes.
+//!     cache/
+//!         registry1-<hash>/<pkg>-<version>.crate
+//!         ...
+//!
+//!     # Location in which all tarballs are unpacked. Each tarball is known to
+//!     # be frozen after downloading, so transitively this folder is also
+//!     # frozen once it's unpacked (it's never unpacked again)
+//!     src/
+//!         registry1-<hash>/<pkg>-<version>/...
+//!         ...
+//! ```
+
+use std::borrow::Cow;
+use std::collections::BTreeMap;
+use std::collections::HashSet;
+use std::fs::{File, OpenOptions};
+use std::io::Write;
+use std::path::{Path, PathBuf};
+
+use flate2::read::GzDecoder;
+use log::debug;
+use semver::{Version, VersionReq};
+use serde::Deserialize;
+use tar::Archive;
+
+use crate::core::dependency::{Dependency, Kind};
+use crate::core::source::MaybePackage;
+use crate::core::{InternedString, Package, PackageId, Source, SourceId, Summary};
+use crate::sources::PathSource;
+use crate::util::errors::CargoResultExt;
+use crate::util::hex;
+use crate::util::into_url::IntoUrl;
+use crate::util::{internal, CargoResult, Config, Filesystem};
+
+const PACKAGE_SOURCE_LOCK: &str = ".cargo-ok";
+pub const CRATES_IO_INDEX: &str = "https://github.com/rust-lang/crates.io-index";
+pub const CRATES_IO_REGISTRY: &str = "crates-io";
+const CRATE_TEMPLATE: &str = "{crate}";
+const VERSION_TEMPLATE: &str = "{version}";
+
+pub struct RegistrySource<'cfg> {
+ source_id: SourceId,
+ src_path: Filesystem,
+ config: &'cfg Config,
+ updated: bool,
+ ops: Box<dyn RegistryData + 'cfg>,
+ index: index::RegistryIndex<'cfg>,
+ yanked_whitelist: HashSet<PackageId>,
+}
+
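+// For reference, the `config.json` at the index root (deserialized into
+// `RegistryConfig` below) looks like this for crates.io:
+//
+// {
+// "dl": "https://crates.io/api/v1/crates",
+// "api": "https://crates.io"
+// }
+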
+#[derive(Deserialize)]
+pub struct RegistryConfig {
+ /// Download endpoint for all crates.
+ ///
+ /// The string is a template which will generate the download URL for the
+ /// tarball of a specific version of a crate. The substrings `{crate}` and
+ /// `{version}` will be replaced with the crate's name and version
+ /// respectively.
+ ///
+ /// For backwards compatibility, if the string does not contain `{crate}` or
+ /// `{version}`, it will be extended with `/{crate}/{version}/download` to
+ /// support registries like crates.io which were created before the
+ /// templating setup was created.
+ pub dl: String,
+
+ /// API endpoint for the registry. This is what's actually hit to perform
+ /// operations like yanks, owner modifications, publish new crates, etc.
+ /// If this is None, the registry does not support API commands.
+ pub api: Option<String>,
+}
+
+#[derive(Deserialize)]
+pub struct RegistryPackage<'a> {
+ name: Cow<'a, str>,
+ vers: Version,
+ deps: Vec<RegistryDependency<'a>>,
+ features: BTreeMap<Cow<'a, str>, Vec<Cow<'a, str>>>,
+ cksum: String,
+ yanked: Option<bool>,
+ links: Option<Cow<'a, str>>,
+}
+
+#[test]
+fn escaped_char_in_json() {
+ let _: RegistryPackage<'_> = serde_json::from_str(
+ r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#,
+ )
+ .unwrap();
+ let _: RegistryPackage<'_> = serde_json::from_str(
+ r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{"test":["k","q"]},"links":"a-sys"}"#
+ ).unwrap();
+
+ // Now we add escaped chars in all the places they can go. These are not
+ // valid, but they should fail later than JSON parsing.
+ let _: RegistryPackage<'_> = serde_json::from_str(
+ r#"{
+ "name":"This name has an escaped char in it \n\t\" ",
+ "vers":"0.0.1",
+ "deps":[{
+ "name": " \n\t\" ",
+ "req": " \n\t\" ",
+ "features": [" \n\t\" "],
+ "optional": true,
+ "default_features": true,
+ "target": " \n\t\" ",
+ "kind": " \n\t\" ",
+ "registry": " \n\t\" "
+ }],
+ "cksum":"bae3",
+ "features":{"test \n\t\" ":["k \n\t\" ","q \n\t\" "]},
+ "links":" \n\t\" "}"#,
+ )
+ .unwrap();
+}
+
+#[derive(Deserialize)]
+#[serde(field_identifier, rename_all = "lowercase")]
+enum Field {
+ Name,
+ Vers,
+ Deps,
+ Features,
+ Cksum,
+ Yanked,
+ Links,
+}
+
+#[derive(Deserialize)]
+struct RegistryDependency<'a> {
+ name: Cow<'a, str>,
+ req: Cow<'a, str>,
+ features: Vec<Cow<'a, str>>,
+ optional: bool,
+ default_features: bool,
+ target: Option<Cow<'a, str>>,
+ kind: Option<Cow<'a, str>>,
+ registry: Option<Cow<'a, str>>,
+ package: Option<Cow<'a, str>>,
+ public: Option<bool>,
+}
+
+impl<'a> RegistryDependency<'a> {
+ /// Converts an encoded dependency in the registry to a cargo dependency
+ pub fn into_dep(self, default: SourceId) -> CargoResult<Dependency> {
+ let RegistryDependency {
+ name,
+ req,
+ mut features,
+ optional,
+ default_features,
+ target,
+ kind,
+ registry,
+ package,
+ public,
+ } = self;
+
+ let id = if let Some(registry) = &registry {
+ SourceId::for_registry(&registry.into_url()?)?
+ } else {
+ default
+ };
+
+ let mut dep =
+ Dependency::parse_no_deprecated(package.as_ref().unwrap_or(&name), Some(&req), id)?;
+ if package.is_some() {
+ dep.set_explicit_name_in_toml(&name);
+ }
+ let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") {
+ "dev" => Kind::Development,
+ "build" => Kind::Build,
+ _ => Kind::Normal,
+ };
+
+ let platform = match target {
+ Some(target) => Some(target.parse()?),
+ None => None,
+ };
+
+ // All dependencies are private by default
+ let public = public.unwrap_or(false);
+
+ // Unfortunately older versions of cargo and/or the registry ended up
+ // publishing lots of entries where the features array contained the
+ // empty feature, "", inside. This confuses the resolution process much
+ // later on and these features aren't actually valid, so filter them all
+ // out here.
+ features.retain(|s| !s.is_empty());
+
+ // In the index, "registry" is null if it is from the same index.
+ // In Cargo.toml, "registry" is None if it is from the default registry.
+ if !id.is_default_registry() {
+ dep.set_registry_id(id);
+ }
+
+ dep.set_optional(optional)
+ .set_default_features(default_features)
+ .set_features(features)
+ .set_platform(platform)
+ .set_kind(kind)
+ .set_public(public);
+
+ Ok(dep)
+ }
+}
+
+pub trait RegistryData {
+ fn prepare(&self) -> CargoResult<()>;
+ fn index_path(&self) -> &Filesystem;
+ fn load(
+ &self,
+ root: &Path,
+ path: &Path,
+ data: &mut dyn FnMut(&[u8]) -> CargoResult<()>,
+ ) -> CargoResult<()>;
+ fn config(&mut self) -> CargoResult<Option<RegistryConfig>>;
+ fn update_index(&mut self) -> CargoResult<()>;
+ fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock>;
+ fn finish_download(&mut self, pkg: PackageId, checksum: &str, data: &[u8])
+ -> CargoResult<File>;
+
+ fn is_crate_downloaded(&self, _pkg: PackageId) -> bool {
+ true
+ }
+ fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path;
+ fn current_version(&self) -> Option<InternedString>;
+}
+
+pub enum MaybeLock {
+ Ready(File),
+ Download { url: String, descriptor: String },
+}
+
+mod index;
+mod local;
+mod remote;
+
+fn short_name(id: SourceId) -> String {
+ let hash = hex::short_hash(&id);
+ let ident = id.url().host_str().unwrap_or("").to_string();
+ format!("{}-{}", ident, hash)
+}
+
+impl<'cfg> RegistrySource<'cfg> {
+ pub fn remote(
+ source_id: SourceId,
+ yanked_whitelist: &HashSet<PackageId>,
+ config: &'cfg Config,
+ ) -> RegistrySource<'cfg> {
+ let name = short_name(source_id);
+ let ops = remote::RemoteRegistry::new(source_id, config, &name);
+ RegistrySource::new(source_id, config, &name, Box::new(ops), yanked_whitelist)
+ }
+
+ pub fn local(
+ source_id: SourceId,
+ path: &Path,
+ yanked_whitelist: &HashSet<PackageId>,
+ config: &'cfg Config,
+ ) -> RegistrySource<'cfg> {
+ let name = short_name(source_id);
+ let ops = local::LocalRegistry::new(path, config, &name);
+ RegistrySource::new(source_id, config, &name, Box::new(ops), yanked_whitelist)
+ }
+
+ fn new(
+ source_id: SourceId,
+ config: &'cfg Config,
+ name: &str,
+ ops: Box<dyn RegistryData + 'cfg>,
+ yanked_whitelist: &HashSet<PackageId>,
+ ) -> RegistrySource<'cfg> {
+ RegistrySource {
+ src_path: config.registry_source_path().join(name),
+ config,
+ source_id,
+ updated: false,
+ index: index::RegistryIndex::new(source_id, ops.index_path(), config),
+ yanked_whitelist: yanked_whitelist.clone(),
+ ops,
+ }
+ }
+
+ /// Decode the configuration stored within the registry.
+ ///
+ /// This requires that the index has been at least checked out.
+ pub fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+ self.ops.config()
+ }
+
+ /// Unpacks a downloaded package into a location where it's ready to be
+ /// compiled.
+ ///
+ /// No action is taken if the source looks like it's already unpacked.
+ fn unpack_package(&self, pkg: PackageId, tarball: &File) -> CargoResult<PathBuf> {
+ // The `.cargo-ok` file is used to track if the source is already
+ // unpacked.
+ let package_dir = format!("{}-{}", pkg.name(), pkg.version());
+ let dst = self.src_path.join(&package_dir);
+ dst.create_dir()?;
+ let path = dst.join(PACKAGE_SOURCE_LOCK);
+ let path = self.config.assert_package_cache_locked(&path);
+ let unpack_dir = path.parent().unwrap();
+ if let Ok(meta) = path.metadata() {
+ if meta.len() > 0 {
+ return Ok(unpack_dir.to_path_buf());
+ }
+ }
+ let mut ok = OpenOptions::new()
+ .create(true)
+ .read(true)
+ .write(true)
+ .open(&path)?;
+
+ let gz = GzDecoder::new(tarball);
+ let mut tar = Archive::new(gz);
+ let prefix = unpack_dir.file_name().unwrap();
+ let parent = unpack_dir.parent().unwrap();
+ for entry in tar.entries()? {
+ let mut entry = entry.chain_err(|| "failed to iterate over archive")?;
+ let entry_path = entry
+ .path()
+ .chain_err(|| "failed to read entry path")?
+ .into_owned();
+
+ // We're going to unpack this tarball into the global source
+ // directory, but we want to make sure that it doesn't accidentally
+ // (or maliciously) overwrite source code from other crates. Cargo
+ // itself should never generate a tarball that hits this error, and
+ // crates.io should also block uploads with these sorts of tarballs,
+ // but be extra sure by adding a check here as well.
+ if !entry_path.starts_with(prefix) {
+ failure::bail!(
+ "invalid tarball downloaded, contains \
+ a file at {:?} which isn't under {:?}",
+ entry_path,
+ prefix
+ )
+ }
+
+ // Once that's verified, unpack the entry as usual.
+ entry
+ .unpack_in(parent)
+ .chain_err(|| format!("failed to unpack entry at `{}`", entry_path.display()))?;
+ }
+
+ // Write to the lock file to indicate that unpacking was successful.
+ write!(ok, "ok")?;
+
+ Ok(unpack_dir.to_path_buf())
+ }
+
+ fn do_update(&mut self) -> CargoResult<()> {
+ self.ops.update_index()?;
+ let path = self.ops.index_path();
+ self.index = index::RegistryIndex::new(self.source_id, path, self.config);
+ self.updated = true;
+ Ok(())
+ }
+
+ fn get_pkg(&mut self, package: PackageId, path: &File) -> CargoResult<Package> {
+ let path = self
+ .unpack_package(package, path)
+ .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?;
+ let mut src = PathSource::new(&path, self.source_id, self.config);
+ src.update()?;
+ let mut pkg = match src.download(package)? {
+ MaybePackage::Ready(pkg) => pkg,
+ MaybePackage::Download { .. } => unreachable!(),
+ };
+
+ // After we've loaded the package, configure its summary's `checksum`
+ // field with the checksum we know for this `PackageId`.
+ let req = VersionReq::exact(package.version());
+ let summary_with_cksum = self
+ .index
+ .summaries(package.name(), &req, &mut *self.ops)?
+ .map(|s| s.summary.clone())
+ .next()
+ .expect("summary not found");
+ if let Some(cksum) = summary_with_cksum.checksum() {
+ pkg.manifest_mut()
+ .summary_mut()
+ .set_checksum(cksum.to_string());
+ }
+
+ Ok(pkg)
+ }
+}
+
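+// For reference, after `unpack_package` above succeeds, the unpacked source
+// lives at a path shaped like this (hypothetical hash):
+//
+// $CARGO_HOME/registry/src/github.com-1ecc6299db9ec823/foo-0.1.0/
+// ├── .cargo-ok (non-empty once unpacking finished)
+// ├── Cargo.toml
+// └── src/...
+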
+impl<'cfg> Source for RegistrySource<'cfg> {
+ fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+ // If this is a precise dependency, then it came from a lock file and in
+ // theory the registry is known to contain this version. If, however, we
+ // come back with no summaries, then our registry may need to be
+ // updated, so we fall back to performing a lazy update.
+ if dep.source_id().precise().is_some() && !self.updated {
+ debug!("attempting query without update");
+ let mut called = false;
+ self.index
+ .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, &mut |s| {
+ if dep.matches(&s) {
+ called = true;
+ f(s);
+ }
+ })?;
+ if called {
+ return Ok(());
+ } else {
+ debug!("falling back to an update");
+ self.do_update()?;
+ }
+ }
+
+ self.index
+ .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, &mut |s| {
+ if dep.matches(&s) {
+ f(s);
+ }
+ })
+ }
+
+ fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+ self.index
+ .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, f)
+ }
+
+ fn supports_checksums(&self) -> bool {
+ true
+ }
+
+ fn requires_precise(&self) -> bool {
+ false
+ }
+
+ fn source_id(&self) -> SourceId {
+ self.source_id
+ }
+
+ fn update(&mut self) -> CargoResult<()> {
+ // If we have an imprecise version then we don't know what we're going
+ // to look for, so we always attempt to perform an update here.
+ //
+ // If we have a precise version, then we'll update lazily during the
+ // querying phase. Note that precise in this case is only
+ // `Some("locked")` as other `Some` values indicate a `cargo update
+ // --precise` request.
+ if self.source_id.precise() != Some("locked") {
+ self.do_update()?;
+ } else {
+ debug!("skipping update due to locked registry");
+ }
+ Ok(())
+ }
+
+ fn download(&mut self, package: PackageId) -> CargoResult<MaybePackage> {
+ let hash = self.index.hash(package, &mut *self.ops)?;
+ match self.ops.download(package, &hash)? {
+ MaybeLock::Ready(file) => self.get_pkg(package, &file).map(MaybePackage::Ready),
+ MaybeLock::Download { url, descriptor } => {
+ Ok(MaybePackage::Download { url, descriptor })
+ }
+ }
+ }
+
+ fn finish_download(&mut self, package: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+ let hash = self.index.hash(package, &mut *self.ops)?;
+ let file = self.ops.finish_download(package, &hash, &data)?;
+ self.get_pkg(package, &file)
+ }
+
+ fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
+ Ok(pkg.package_id().version().to_string())
+ }
+
+ fn describe(&self) -> String {
+ self.source_id.display_index()
+ }
+
+ fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+ self.yanked_whitelist.extend(pkgs);
+ }
+
+ fn is_yanked(&mut self, pkg: PackageId) -> CargoResult<bool> {
+ if !self.updated {
+ self.do_update()?;
+ }
+ self.index.is_yanked(pkg, &mut *self.ops)
+ }
+}
diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs
new file mode 100644
index 00000000000..c44964432e2
--- /dev/null
+++ b/src/cargo/sources/registry/remote.rs
@@ -0,0 +1,328 @@
+use crate::core::{InternedString, PackageId, SourceId};
+use crate::sources::git;
+use crate::sources::registry::MaybeLock;
+use crate::sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, VERSION_TEMPLATE};
+use crate::util::errors::{CargoResult, CargoResultExt};
+use crate::util::{Config, Filesystem, Sha256};
+use lazycell::LazyCell;
+use log::{debug, trace};
+use std::cell::{Cell, Ref, RefCell};
+use std::fmt::Write as FmtWrite;
+use std::fs::{self, File, OpenOptions};
+use std::io::prelude::*;
+use std::io::SeekFrom;
+use std::mem;
+use std::path::Path;
+use std::str;
+
+pub struct RemoteRegistry<'cfg> {
+ index_path: Filesystem,
+ cache_path: Filesystem,
+ source_id: SourceId,
+ config: &'cfg Config,
+ tree: RefCell<Option<git2::Tree<'static>>>,
+ repo: LazyCell<git2::Repository>,
+ head: Cell<Option<git2::Oid>>,
+ current_sha: Cell<Option<InternedString>>,
+}
+
+impl<'cfg> RemoteRegistry<'cfg> {
+ pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> {
+ RemoteRegistry {
+ index_path: config.registry_index_path().join(name),
+ cache_path: config.registry_cache_path().join(name),
+ source_id,
+ config,
+ tree: RefCell::new(None),
+ repo: LazyCell::new(),
+ head: Cell::new(None),
+ current_sha: Cell::new(None),
+ }
+ }
+
+ fn repo(&self) -> CargoResult<&git2::Repository> {
+ self.repo.try_borrow_with(|| {
+ let path = self.config.assert_package_cache_locked(&self.index_path);
+
+ // Fast path without a lock
+ if let Ok(repo) = git2::Repository::open(&path) {
+ trace!("opened a repo without a lock");
+ return Ok(repo);
+ }
+
+ // Ok, now we need to lock and try the whole thing over again.
+ trace!("acquiring registry index lock");
+ match git2::Repository::open(&path) {
+ Ok(repo) => Ok(repo),
+ Err(_) => {
+ drop(fs::remove_dir_all(&path));
+ fs::create_dir_all(&path)?;
+
+ // Note that we'd actually prefer to use a bare repository
+ // here as we're not actually going to check anything out.
+ // All versions of Cargo, though, share the same CARGO_HOME,
+ // so for compatibility with older Cargo which *does* do
+ // checkouts we make sure to initialize a new full
+ // repository (not a bare one).
+ //
+ // We should change this to `init_bare` whenever we feel
+ // like enough time has passed or if we change the directory
+ // that the folder is located in, such as by changing the
+ // hash at the end of the directory.
+ //
+ // Note that in the meantime we also skip `init.templatedir`
+ // as it can be misconfigured sometimes or otherwise add
+ // things that we don't want.
+ let mut opts = git2::RepositoryInitOptions::new();
+ opts.external_template(false);
+ Ok(git2::Repository::init_opts(&path, &opts)
+ .chain_err(|| "failed to initialize index git repository")?)
+ }
+ }
+ })
+ }
+
+ fn head(&self) -> CargoResult<git2::Oid> {
+ if self.head.get().is_none() {
+ let oid = self.repo()?.refname_to_id("refs/remotes/origin/master")?;
+ self.head.set(Some(oid));
+ }
+ Ok(self.head.get().unwrap())
+ }
+
+ fn tree(&self) -> CargoResult<Ref<'_, git2::Tree<'static>>> {
+ {
+ let tree = self.tree.borrow();
+ if tree.is_some() {
+ return Ok(Ref::map(tree, |s| s.as_ref().unwrap()));
+ }
+ }
+ let repo = self.repo()?;
+ let commit = repo.find_commit(self.head()?)?;
+ let tree = commit.tree()?;
+
+ // Unfortunately in libgit2 the tree objects look like they've got a
+ // reference to the repository object which means that a tree cannot
+ // outlive the repository that it came from. Here we want to cache this
+ // tree, though, so to accomplish this we transmute it to a static
+ // lifetime.
+ //
+ // Note that we don't actually hand out the static lifetime, instead we
+ // only return a scoped one from this function. Additionally the repo
+ // we loaded from (above) lives as long as this object
+ // (`RemoteRegistry`) so we then just need to ensure that the tree is
+ // destroyed first in the destructor, hence the destructor on
+ // `RemoteRegistry` below.
+ let tree = unsafe { mem::transmute::<git2::Tree<'_>, git2::Tree<'static>>(tree) };
+ *self.tree.borrow_mut() = Some(tree);
+ Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap()))
+ }
+
+ fn filename(&self, pkg: PackageId) -> String {
+ format!("{}-{}.crate", pkg.name(), pkg.version())
+ }
+}
+
+const LAST_UPDATED_FILE: &str = ".last-updated";
+
+impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
+ fn prepare(&self) -> CargoResult<()> {
+ self.repo()?; // create intermediate dirs and initialize the repo
+ Ok(())
+ }
+
+ fn index_path(&self) -> &Filesystem {
+ &self.index_path
+ }
+
+ fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path {
+ self.config.assert_package_cache_locked(path)
+ }
+
+ fn current_version(&self) -> Option<InternedString> {
+ if let Some(sha) = self.current_sha.get() {
+ return Some(sha);
+ }
+ let sha = InternedString::new(&self.head().ok()?.to_string());
+ self.current_sha.set(Some(sha));
+ Some(sha)
+ }
+
+ fn load(
+ &self,
+ _root: &Path,
+ path: &Path,
+ data: &mut dyn FnMut(&[u8]) -> CargoResult<()>,
+ ) -> CargoResult<()> {
+ // Note that the index calls this method and the filesystem is locked
+ // in the index, so we don't need to worry about an `update_index`
+ // happening in a different process.
+ let repo = self.repo()?;
+ let tree = self.tree()?;
+ let entry = tree.get_path(path)?;
+ let object = entry.to_object(repo)?;
+ let blob = match object.as_blob() {
+ Some(blob) => blob,
+ None => failure::bail!("path `{}` is not a blob in the git repo", path.display()),
+ };
+ data(blob.content())
+ }
+
+ fn config(&mut self) -> CargoResult<Option<RegistryConfig>> {
+ debug!("loading config");
+ self.prepare()?;
+ self.config.assert_package_cache_locked(&self.index_path);
+ let mut config = None;
+ self.load(Path::new(""), Path::new("config.json"), &mut |json| {
+ config = Some(serde_json::from_slice(json)?);
+ Ok(())
+ })?;
+ trace!("config loaded");
+ Ok(config)
+ }
+
+ fn update_index(&mut self) -> CargoResult<()> {
+ if self.config.offline() {
+ if self.repo()?.is_empty()? {
+ // An empty repository is guaranteed to fail, since hitting
+ // this path means we need at least one crate. This is an
+ // attempt to provide a better error message than "no
+ // matching package named …".
+ failure::bail!(
+ "unable to fetch {} in offline mode\n\
+ Try running without the offline flag, or try running \
+ `cargo fetch` within your project directory before going offline.",
+ self.source_id
+ );
+ }
+ return Ok(());
+ }
+ if self.config.cli_unstable().no_index_update {
+ return Ok(());
+ }
+ // Make sure the index is only updated once per session since it is an
+ // expensive operation. This generally only happens when the resolver
+ // is run multiple times, such as during `cargo publish`.
+ if self.config.updated_sources().contains(&self.source_id) {
+ return Ok(());
+ }
+
+ debug!("updating the index");
+
+ // Ensure that we'll actually be able to acquire an HTTP handle later on
+ // once we start trying to download crates. This will weed out any
+ // problems with `.cargo/config` configuration related to HTTP.
+ //
+ // This way if there's a problem the error gets printed before we even
+ // hit the index, which may not actually read this configuration.
+ self.config.http()?;
+
+ self.prepare()?;
+ self.head.set(None);
+ *self.tree.borrow_mut() = None;
+ self.current_sha.set(None);
+ let path = self.config.assert_package_cache_locked(&self.index_path);
+ self.config
+ .shell()
+ .status("Updating", self.source_id.display_index())?;
+
+ // git fetch origin master
+ let url = self.source_id.url();
+ let refspec = "refs/heads/master:refs/remotes/origin/master";
+ let repo = self.repo.borrow_mut().unwrap();
+ git::fetch(repo, url, refspec, self.config)
+ .chain_err(|| format!("failed to fetch `{}`", url))?;
+ self.config.updated_sources().insert(self.source_id);
+
+ // Create a dummy file to record the mtime for when we updated the
+ // index.
+ File::create(&path.join(LAST_UPDATED_FILE))?;
+
+ Ok(())
+ }
+
+ fn download(&mut self, pkg: PackageId, _checksum: &str) -> CargoResult<MaybeLock> {
+ let filename = self.filename(pkg);
+
+ // Attempt to open a read-only copy first to avoid an exclusive write
+ // lock and also work with read-only filesystems. Note that we check the
+ // length of the file below to handle interrupted downloads.
+ //
+ // If this fails then we fall through to the exclusive path where we may
+ // have to redownload the file.
+ let path = self.cache_path.join(&filename);
+ let path = self.config.assert_package_cache_locked(&path);
+ if let Ok(dst) = File::open(&path) {
+ let meta = dst.metadata()?;
+ if meta.len() > 0 {
+ return Ok(MaybeLock::Ready(dst));
+ }
+ }
+
+ let config = self.config()?.unwrap();
+ let mut url = config.dl;
+ if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) {
+ write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap();
+ }
+ let url = url
+ .replace(CRATE_TEMPLATE, &*pkg.name())
+ .replace(VERSION_TEMPLATE, &pkg.version().to_string());
+
+ Ok(MaybeLock::Download {
+ url,
+ descriptor: pkg.to_string(),
+ })
+ }
+
+ fn finish_download(
+ &mut self,
+ pkg: PackageId,
+ checksum: &str,
+ data: &[u8],
+ ) -> CargoResult<File> {
+ // Verify what we just downloaded
+ let actual = Sha256::new().update(data).finish_hex();
+ if actual != checksum {
+ failure::bail!("failed to verify the checksum of `{}`", pkg)
+ }
+
+ let filename = self.filename(pkg);
+ self.cache_path.create_dir()?;
+ let path = self.cache_path.join(&filename);
+ let path = self.config.assert_package_cache_locked(&path);
+ let mut dst = OpenOptions::new()
+ .create(true)
+ .read(true)
+ .write(true)
+ .open(&path)?;
+ let meta = dst.metadata()?;
+ if meta.len() > 0 {
+ return Ok(dst);
+ }
+
+ dst.write_all(data)?;
+ dst.seek(SeekFrom::Start(0))?;
+ Ok(dst)
+ }
+
+ fn is_crate_downloaded(&self, pkg: PackageId) -> bool {
+ let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
+ let path = Path::new(&filename);
+
+ let path = self.cache_path.join(path);
+ let path = self.config.assert_package_cache_locked(&path);
+ if let Ok(dst) = File::open(path) {
+ if let Ok(meta) = dst.metadata() {
+ return meta.len() > 0;
+ }
+ }
+ false
+ }
+}
+
+impl<'cfg> Drop for RemoteRegistry<'cfg> {
+ fn drop(&mut self) {
+ // Just be sure to drop this before our other fields
+ self.tree.borrow_mut().take();
+ }
+}
diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs
new file mode 100644
index 00000000000..7f4a622fd84
--- /dev/null
+++ b/src/cargo/sources/replaced.rs
@@ -0,0 +1,128 @@
+use crate::core::source::MaybePackage;
+use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary};
+use crate::util::errors::{CargoResult, CargoResultExt};
+
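+// `ReplacedSource` wraps another source so that packages keep resolving
+// under the original `SourceId` while the data actually comes from the
+// replacement. For reference, this backs `.cargo/config` source replacement
+// such as (example mirror name and URL):
+//
+// [source.crates-io]
+// replace-with = "my-mirror"
+//
+// [source.my-mirror]
+// registry = "https://example.com/git/index"
+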
+pub struct ReplacedSource<'cfg> {
+ to_replace: SourceId,
+ replace_with: SourceId,
+ inner: Box<dyn Source + 'cfg>,
+}
+
+impl<'cfg> ReplacedSource<'cfg> {
+ pub fn new(
+ to_replace: SourceId,
+ replace_with: SourceId,
+ src: Box<dyn Source + 'cfg>,
+ ) -> ReplacedSource<'cfg> {
+ ReplacedSource {
+ to_replace,
+ replace_with,
+ inner: src,
+ }
+ }
+}
+
+impl<'cfg> Source for ReplacedSource<'cfg> {
+ fn source_id(&self) -> SourceId {
+ self.to_replace
+ }
+
+ fn replaced_source_id(&self) -> SourceId {
+ self.replace_with
+ }
+
+ fn supports_checksums(&self) -> bool {
+ self.inner.supports_checksums()
+ }
+
+ fn requires_precise(&self) -> bool {
+ self.inner.requires_precise()
+ }
+
+ fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+ let (replace_with, to_replace) = (self.replace_with, self.to_replace);
+ let dep = dep.clone().map_source(to_replace, replace_with);
+
+ self.inner
+ .query(&dep, &mut |summary| {
+ f(summary.map_source(replace_with, to_replace))
+ })
+ .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
+ Ok(())
+ }
+
+ fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
+ let (replace_with, to_replace) = (self.replace_with, self.to_replace);
+ let dep = dep.clone().map_source(to_replace, replace_with);
+
+ self.inner
+ .fuzzy_query(&dep, &mut |summary| {
+ f(summary.map_source(replace_with, to_replace))
+ })
+ .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?;
+ Ok(())
+ }
+
+ fn update(&mut self) -> CargoResult<()> {
+ self.inner
+ .update()
+ .chain_err(|| format!("failed to update replaced source {}", self.to_replace))?;
+ Ok(())
+ }
+
+ fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
+ let id = id.with_source_id(self.replace_with);
+ let pkg = self
+ .inner
+ .download(id)
+ .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
+ Ok(match pkg {
+ MaybePackage::Ready(pkg) => {
+ MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace))
+ }
+ other @ MaybePackage::Download { .. } => other,
+ })
+ }
+
+ fn finish_download(&mut self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> {
+ let id = id.with_source_id(self.replace_with);
+ let pkg = self
+ .inner
+ .finish_download(id, data)
+ .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?;
+ Ok(pkg.map_source(self.replace_with, self.to_replace))
+ }
+
+ fn fingerprint(&self, id: &Package) -> CargoResult<String> {
+ self.inner.fingerprint(id)
+ }
+
+ fn verify(&self, id: PackageId) -> CargoResult<()> {
+ let id = id.with_source_id(self.replace_with);
+ self.inner.verify(id)
+ }
+
+ fn describe(&self) -> String {
+ format!(
+ "{} (which is replacing {})",
+ self.inner.describe(),
+ self.to_replace
+ )
+ }
+
+ fn is_replaced(&self) -> bool {
+ true
+ }
+
+ fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) {
+ let pkgs = pkgs
+ .iter()
+ .map(|id| id.with_source_id(self.replace_with))
+ .collect::<Vec<_>>();
+ self.inner.add_to_yanked_whitelist(&pkgs);
+ }
+
+ fn is_yanked(&mut self, pkg: PackageId) -> CargoResult<bool> {
+ self.inner.is_yanked(pkg)
+ }
+}
diff --git a/src/cargo/util/cfg.rs b/src/cargo/util/cfg.rs
new file mode 100644
index 00000000000..ac5e9e644db
--- /dev/null
+++ b/src/cargo/util/cfg.rs
@@ -0,0 +1,331 @@
+use std::fmt;
+use std::iter;
+
+use std::str::{self, FromStr};
+
+use serde::ser;
+
+use crate::util::errors::CargoResultExt;
+use crate::util::CargoResult;
+
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Cfg {
+ Name(String),
+ KeyPair(String, String),
+}
+
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum CfgExpr {
+ Not(Box<CfgExpr>),
+ All(Vec<CfgExpr>),
+ Any(Vec<CfgExpr>),
+ Value(Cfg),
+}
+
+#[derive(PartialEq)]
+enum Token<'a> {
+ LeftParen,
+ RightParen,
+ Ident(&'a str),
+ Comma,
+ Equals,
+ String(&'a str),
+}
+
+#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)]
+pub enum Platform {
+ Name(String),
+ Cfg(CfgExpr),
+}
+
+struct Tokenizer<'a> {
+ s: iter::Peekable<str::CharIndices<'a>>,
+ orig: &'a str,
+}
+
+struct Parser<'a> {
+ t: iter::Peekable<Tokenizer<'a>>,
+}
+
+impl FromStr for Cfg {
+ type Err = failure::Error;
+
+ fn from_str(s: &str) -> CargoResult<Cfg> {
+ let mut p = Parser::new(s);
+ let e = p.cfg()?;
+ if p.t.next().is_some() {
+ failure::bail!("malformed cfg value or key/value pair: `{}`", s)
+ }
+ Ok(e)
+ }
+}
+
+impl fmt::Display for Cfg {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Cfg::Name(ref s) => s.fmt(f),
+ Cfg::KeyPair(ref k, ref v) => write!(f, "{} = \"{}\"", k, v),
+ }
+ }
+}
+
+impl CfgExpr {
+ /// Utility function to check if the key `"cfg(..)"` matches the `target_cfg`
+ pub fn matches_key(key: &str, target_cfg: &[Cfg]) -> bool {
+ if key.starts_with("cfg(") && key.ends_with(')') {
+ let cfg = &key[4..key.len() - 1];
+
+ CfgExpr::from_str(cfg)
+ .ok()
+ .map(|ce| ce.matches(target_cfg))
+ .unwrap_or(false)
+ } else {
+ false
+ }
+ }
+
+ pub fn matches(&self, cfg: &[Cfg]) -> bool {
+ match *self {
+ CfgExpr::Not(ref e) => !e.matches(cfg),
+ CfgExpr::All(ref e) => e.iter().all(|e| e.matches(cfg)),
+ CfgExpr::Any(ref e) => e.iter().any(|e| e.matches(cfg)),
+ CfgExpr::Value(ref e) => cfg.contains(e),
+ }
+ }
+}
+
+impl FromStr for CfgExpr {
+ type Err = failure::Error;
+
+ fn from_str(s: &str) -> CargoResult<CfgExpr> {
+ let mut p = Parser::new(s);
+ let e = p.expr()?;
+ if p.t.next().is_some() {
+ failure::bail!(
+ "can only have one cfg-expression, consider using all() or \
+ any() explicitly"
+ )
+ }
+ Ok(e)
+ }
+}
+
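+// Illustration only (hypothetical test): parsing a cfg expression and
+// matching it against a set of active cfg values.
+#[test]
+fn cfg_expr_illustration() {
+ let e = CfgExpr::from_str("all(unix, target_pointer_width = \"64\")").unwrap();
+ let active = [
+ Cfg::Name("unix".to_string()),
+ Cfg::KeyPair("target_pointer_width".to_string(), "64".to_string()),
+ ];
+ assert!(e.matches(&active));
+ assert!(!CfgExpr::from_str("not(unix)").unwrap().matches(&active));
+}
+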
+impl fmt::Display for CfgExpr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ CfgExpr::Not(ref e) => write!(f, "not({})", e),
+ CfgExpr::All(ref e) => write!(f, "all({})", CommaSep(e)),
+ CfgExpr::Any(ref e) => write!(f, "any({})", CommaSep(e)),
+ CfgExpr::Value(ref e) => write!(f, "{}", e),
+ }
+ }
+}
+
+struct CommaSep<'a, T>(&'a [T]);
+
+impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ for (i, v) in self.0.iter().enumerate() {
+ if i > 0 {
+ write!(f, ", ")?;
+ }
+ write!(f, "{}", v)?;
+ }
+ Ok(())
+ }
+}
+
+impl<'a> Parser<'a> {
+ fn new(s: &'a str) -> Parser<'a> {
+ Parser {
+ t: Tokenizer {
+ s: s.char_indices().peekable(),
+ orig: s,
+ }
+ .peekable(),
+ }
+ }
+
+ fn expr(&mut self) -> CargoResult<CfgExpr> {
+ match self.t.peek() {
+ Some(&Ok(Token::Ident(op @ "all"))) | Some(&Ok(Token::Ident(op @ "any"))) => {
+ self.t.next();
+ let mut e = Vec::new();
+ self.eat(&Token::LeftParen)?;
+ while !self.r#try(&Token::RightParen) {
+ e.push(self.expr()?);
+ if !self.r#try(&Token::Comma) {
+ self.eat(&Token::RightParen)?;
+ break;
+ }
+ }
+ if op == "all" {
+ Ok(CfgExpr::All(e))
+ } else {
+ Ok(CfgExpr::Any(e))
+ }
+ }
+ Some(&Ok(Token::Ident("not"))) => {
+ self.t.next();
+ self.eat(&Token::LeftParen)?;
+ let e = self.expr()?;
+ self.eat(&Token::RightParen)?;
+ Ok(CfgExpr::Not(Box::new(e)))
+ }
+ Some(&Ok(..)) => self.cfg().map(CfgExpr::Value),
+ Some(&Err(..)) => Err(self.t.next().unwrap().err().unwrap()),
+ None => failure::bail!(
+ "expected start of a cfg expression, \
+ found nothing"
+ ),
+ }
+ }
+
+ fn cfg(&mut self) -> CargoResult<Cfg> {
+ match self.t.next() {
+ Some(Ok(Token::Ident(name))) => {
+ let e = if self.r#try(&Token::Equals) {
+ let val = match self.t.next() {
+ Some(Ok(Token::String(s))) => s,
+ Some(Ok(t)) => failure::bail!("expected a string, found {}", t.classify()),
+ Some(Err(e)) => return Err(e),
+ None => failure::bail!("expected a string, found nothing"),
+ };
+ Cfg::KeyPair(name.to_string(), val.to_string())
+ } else {
+ Cfg::Name(name.to_string())
+ };
+ Ok(e)
+ }
+ Some(Ok(t)) => failure::bail!("expected identifier, found {}", t.classify()),
+ Some(Err(e)) => Err(e),
+ None => failure::bail!("expected identifier, found nothing"),
+ }
+ }
+
+ fn r#try(&mut self, token: &Token<'a>) -> bool {
+ match self.t.peek() {
+ Some(&Ok(ref t)) if token == t => {}
+ _ => return false,
+ }
+ self.t.next();
+ true
+ }
+
+ fn eat(&mut self, token: &Token<'a>) -> CargoResult<()> {
+ match self.t.next() {
+ Some(Ok(ref t)) if token == t => Ok(()),
+ Some(Ok(t)) => failure::bail!("expected {}, found {}", token.classify(), t.classify()),
+ Some(Err(e)) => Err(e),
+ None => failure::bail!("expected {}, but cfg expr ended", token.classify()),
+ }
+ }
+}
+
+impl<'a> Iterator for Tokenizer<'a> {
+ type Item = CargoResult<Token<'a>>;
+
+ fn next(&mut self) -> Option<CargoResult<Token<'a>>> {
+ loop {
+ match self.s.next() {
+ Some((_, ' ')) => {}
+ Some((_, '(')) => return Some(Ok(Token::LeftParen)),
+ Some((_, ')')) => return Some(Ok(Token::RightParen)),
+ Some((_, ',')) => return Some(Ok(Token::Comma)),
+ Some((_, '=')) => return Some(Ok(Token::Equals)),
+ Some((start, '"')) => {
+ while let Some((end, ch)) = self.s.next() {
+ if ch == '"' {
+ return Some(Ok(Token::String(&self.orig[start + 1..end])));
+ }
+ }
+ return Some(Err(failure::format_err!("unterminated string in cfg")));
+ }
+ Some((start, ch)) if is_ident_start(ch) => {
+ while let Some(&(end, ch)) = self.s.peek() {
+ if !is_ident_rest(ch) {
+ return Some(Ok(Token::Ident(&self.orig[start..end])));
+ } else {
+ self.s.next();
+ }
+ }
+ return Some(Ok(Token::Ident(&self.orig[start..])));
+ }
+ Some((_, ch)) => {
+ return Some(Err(failure::format_err!(
+ "unexpected character in \
+ cfg `{}`, expected parens, \
+ a comma, an identifier, or \
+ a string",
+ ch
+ )));
+ }
+ None => return None,
+ }
+ }
+ }
+}
+
+fn is_ident_start(ch: char) -> bool {
+ ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')
+}
+
+fn is_ident_rest(ch: char) -> bool {
+ is_ident_start(ch) || ('0' <= ch && ch <= '9')
+}
+
+impl<'a> Token<'a> {
+ fn classify(&self) -> &str {
+ match *self {
+ Token::LeftParen => "`(`",
+ Token::RightParen => "`)`",
+ Token::Ident(..) => "an identifier",
+ Token::Comma => "`,`",
+ Token::Equals => "`=`",
+ Token::String(..) => "a string",
+ }
+ }
+}
+
+impl Platform {
+ pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool {
+ match *self {
+ Platform::Name(ref p) => p == name,
+ Platform::Cfg(ref p) => p.matches(cfg),
+ }
+ }
+}
+
+impl ser::Serialize for Platform {
+ fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+ where
+ S: ser::Serializer,
+ {
+ self.to_string().serialize(s)
+ }
+}
+
+impl FromStr for Platform {
+ type Err = failure::Error;
+
+ fn from_str(s: &str) -> CargoResult<Platform> {
+ if s.starts_with("cfg(") && s.ends_with(')') {
+ let s = &s[4..s.len() - 1];
+ let p = s.parse().map(Platform::Cfg).chain_err(|| {
+ failure::format_err!("failed to parse `{}` as a cfg expression", s)
+ })?;
+ Ok(p)
+ } else {
+ Ok(Platform::Name(s.to_string()))
+ }
+ }
+}
+
+impl fmt::Display for Platform {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Platform::Name(ref n) => n.fmt(f),
+ Platform::Cfg(ref e) => write!(f, "cfg({})", e),
+ }
+ }
+}
diff --git a/src/cargo/util/command_prelude.rs b/src/cargo/util/command_prelude.rs
new file mode 100644
index 00000000000..bb534be726c
--- /dev/null
+++ b/src/cargo/util/command_prelude.rs
@@ -0,0 +1,524 @@
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::path::PathBuf;
+
+use crate::core::compiler::{BuildConfig, MessageFormat};
+use crate::core::Workspace;
+use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl};
+use crate::sources::CRATES_IO_REGISTRY;
+use crate::util::important_paths::find_root_manifest_for_wd;
+use crate::util::{paths, validate_package_name};
+use crate::util::{
+ print_available_benches, print_available_binaries, print_available_examples,
+ print_available_tests,
+};
+use crate::CargoResult;
+use clap::{self, SubCommand};
+
+pub use crate::core::compiler::CompileMode;
+pub use crate::{CliError, CliResult, Config};
+pub use clap::{AppSettings, Arg, ArgMatches};
+
+pub type App = clap::App<'static, 'static>;
+
+pub trait AppExt: Sized {
+ fn _arg(self, arg: Arg<'static, 'static>) -> Self;
+
+ fn arg_package_spec(
+ self,
+ package: &'static str,
+ all: &'static str,
+ exclude: &'static str,
+ ) -> Self {
+ self.arg_package_spec_simple(package)
+ ._arg(opt("all", all))
+ ._arg(multi_opt("exclude", "SPEC", exclude))
+ }
+
+ fn arg_package_spec_simple(self, package: &'static str) -> Self {
+ self._arg(multi_opt("package", "SPEC", package).short("p"))
+ }
+
+ fn arg_package(self, package: &'static str) -> Self {
+ self._arg(opt("package", package).short("p").value_name("SPEC"))
+ }
+
+ fn arg_jobs(self) -> Self {
+ self._arg(
+ opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
+ .short("j")
+ .value_name("N"),
+ )
+ }
+
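+ // For illustration (hypothetical subcommand definition): the helpers in
+ // this trait compose like so when a command builds its CLI:
+ //
+ // SubCommand::with_name("build")
+ // .arg_package_spec("Package to build", "Build all packages", "Exclude")
+ // .arg_jobs()
+ // .arg_release("Build artifacts in release mode, with optimizations")
+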
&'static str, + bench: &'static str, + benches: &'static str, + all: &'static str, + ) -> Self { + self.arg_targets_lib_bin(lib, bin, bins) + ._arg(optional_multi_opt("example", "NAME", example)) + ._arg(opt("examples", examples)) + ._arg(optional_multi_opt("test", "NAME", test)) + ._arg(opt("tests", tests)) + ._arg(optional_multi_opt("bench", "NAME", bench)) + ._arg(opt("benches", benches)) + ._arg(opt("all-targets", all)) + } + + fn arg_targets_lib_bin(self, lib: &'static str, bin: &'static str, bins: &'static str) -> Self { + self._arg(opt("lib", lib)) + ._arg(optional_multi_opt("bin", "NAME", bin)) + ._arg(opt("bins", bins)) + } + + fn arg_targets_bins_examples( + self, + bin: &'static str, + bins: &'static str, + example: &'static str, + examples: &'static str, + ) -> Self { + self._arg(optional_multi_opt("bin", "NAME", bin)) + ._arg(opt("bins", bins)) + ._arg(optional_multi_opt("example", "NAME", example)) + ._arg(opt("examples", examples)) + } + + fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self { + self._arg(optional_multi_opt("bin", "NAME", bin)) + ._arg(optional_multi_opt("example", "NAME", example)) + } + + fn arg_features(self) -> Self { + self._arg( + opt("features", "Space-separated list of features to activate") + .multiple(true) + .value_name("FEATURES"), + ) + ._arg(opt("all-features", "Activate all available features")) + ._arg(opt( + "no-default-features", + "Do not activate the `default` feature", + )) + } + + fn arg_release(self, release: &'static str) -> Self { + self._arg(opt("release", release)) + } + + fn arg_doc(self, doc: &'static str) -> Self { + self._arg(opt("doc", doc)) + } + + fn arg_target_triple(self, target: &'static str) -> Self { + self._arg(opt("target", target).value_name("TRIPLE")) + } + + fn arg_target_dir(self) -> Self { + self._arg( + opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY"), + ) + } + + fn arg_manifest_path(self) -> Self { + self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH")) + } + + fn arg_message_format(self) -> Self { + self._arg( + opt("message-format", "Error format") + .value_name("FMT") + .case_insensitive(true) + .possible_values(&["human", "json", "short"]) + .default_value("human"), + ) + } + + fn arg_build_plan(self) -> Self { + self._arg(opt( + "build-plan", + "Output the build plan in JSON (unstable)", + )) + } + + fn arg_new_opts(self) -> Self { + self._arg( + opt( + "vcs", + "Initialize a new repository for the given version \ + control system (git, hg, pijul, or fossil) or do not \ + initialize any version control at all (none), overriding \ + a global configuration.", + ) + .value_name("VCS") + .possible_values(&["git", "hg", "pijul", "fossil", "none"]), + ) + ._arg(opt("bin", "Use a binary (application) template [default]")) + ._arg(opt("lib", "Use a library template")) + ._arg( + opt("edition", "Edition to set for the crate generated") + .possible_values(&["2015", "2018"]) + .value_name("YEAR"), + ) + ._arg( + opt( + "name", + "Set the resulting package name, defaults to the directory name", + ) + .value_name("NAME"), + ) + } + + fn arg_index(self) -> Self { + self._arg(opt("index", "Registry index URL to upload the package to").value_name("INDEX")) + ._arg( + opt("host", "DEPRECATED, renamed to '--index'") + .value_name("HOST") + .hidden(true), + ) + } + + fn arg_dry_run(self, dry_run: &'static str) -> Self { + self._arg(opt("dry-run", dry_run)) + } +} + +impl AppExt for App { + fn _arg(self, arg: Arg<'static, 'static>) -> Self { + 
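+        // All of the `arg_*` helpers in `AppExt` above bottom out in this
+        // one required method. A hedged sketch of how a subcommand composes
+        // them (helper names from this file, argument strings invented):
+        //
+        //     subcommand("build")
+        //         .arg_jobs()
+        //         .arg_release("Build artifacts in release mode")
+        //
+        // desugars into plain `clap` `.arg(...)` calls through `_arg`.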
self.arg(arg)
+    }
+}
+
+pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {
+    Arg::with_name(name).long(name).help(help)
+}
+
+pub fn optional_multi_opt(
+    name: &'static str,
+    value_name: &'static str,
+    help: &'static str,
+) -> Arg<'static, 'static> {
+    opt(name, help)
+        .value_name(value_name)
+        .multiple(true)
+        .min_values(0)
+        .number_of_values(1)
+}
+
+pub fn multi_opt(
+    name: &'static str,
+    value_name: &'static str,
+    help: &'static str,
+) -> Arg<'static, 'static> {
+    // Note that all `.multiple(true)` arguments in Cargo should specify
+    // `.number_of_values(1)` as well, so that `--foo val1 val2` is
+    // *not* parsed as `foo` with values ["val1", "val2"].
+    // `number_of_values` should become the default in clap 3.
+    opt(name, help)
+        .value_name(value_name)
+        .multiple(true)
+        .number_of_values(1)
+}
+
+pub fn subcommand(name: &'static str) -> App {
+    SubCommand::with_name(name).settings(&[
+        AppSettings::UnifiedHelpMessage,
+        AppSettings::DeriveDisplayOrder,
+        AppSettings::DontCollapseArgsInUsage,
+    ])
+}
+
+pub trait ArgMatchesExt {
+    fn value_of_u32(&self, name: &str) -> CargoResult<Option<u32>> {
+        let arg = match self._value_of(name) {
+            None => None,
+            Some(arg) => Some(arg.parse::<u32>().map_err(|_| {
+                clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg))
+            })?),
+        };
+        Ok(arg)
+    }
+
+    /// Returns value of the `name` command-line argument as an absolute path
+    fn value_of_path(&self, name: &str, config: &Config) -> Option<PathBuf> {
+        self._value_of(name).map(|path| config.cwd().join(path))
+    }
+
+    fn root_manifest(&self, config: &Config) -> CargoResult<PathBuf> {
+        if let Some(path) = self.value_of_path("manifest-path", config) {
+            // In general, we try to avoid normalizing paths in Cargo,
+            // but in this particular case we need it to fix #3586.
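+            // `paths::normalize_path` resolves `.` and `..` components
+            // lexically, without touching the filesystem. A rough sketch of
+            // that idea (not the actual helper, which lives in
+            // `crate::util::paths`):
+            //
+            //     use std::path::{Component, Path, PathBuf};
+            //     fn normalize(p: &Path) -> PathBuf {
+            //         let mut out = PathBuf::new();
+            //         for c in p.components() {
+            //             match c {
+            //                 Component::CurDir => {}
+            //                 Component::ParentDir => { out.pop(); }
+            //                 other => out.push(other.as_os_str()),
+            //             }
+            //         }
+            //         out
+            //     }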
+ let path = paths::normalize_path(&path); + if !path.ends_with("Cargo.toml") { + failure::bail!("the manifest-path must be a path to a Cargo.toml file") + } + if fs::metadata(&path).is_err() { + failure::bail!( + "manifest path `{}` does not exist", + self._value_of("manifest-path").unwrap() + ) + } + return Ok(path); + } + find_root_manifest_for_wd(config.cwd()) + } + + fn workspace<'a>(&self, config: &'a Config) -> CargoResult> { + let root = self.root_manifest(config)?; + let mut ws = Workspace::new(&root, config)?; + if config.cli_unstable().avoid_dev_deps { + ws.set_require_optional_deps(false); + } + Ok(ws) + } + + fn jobs(&self) -> CargoResult> { + self.value_of_u32("jobs") + } + + fn target(&self) -> Option { + self._value_of("target").map(|s| s.to_string()) + } + + fn compile_options<'a>( + &self, + config: &'a Config, + mode: CompileMode, + workspace: Option<&Workspace<'a>>, + ) -> CargoResult> { + let spec = Packages::from_flags( + self._is_present("all"), + self._values_of("exclude"), + self._values_of("package"), + )?; + + let message_format = match self._value_of("message-format") { + None => MessageFormat::Human, + Some(f) => { + if f.eq_ignore_ascii_case("json") { + MessageFormat::Json + } else if f.eq_ignore_ascii_case("human") { + MessageFormat::Human + } else if f.eq_ignore_ascii_case("short") { + MessageFormat::Short + } else { + panic!("Impossible message format: {:?}", f) + } + } + }; + + let mut build_config = BuildConfig::new(config, self.jobs()?, &self.target(), mode)?; + build_config.message_format = message_format; + build_config.release = self._is_present("release"); + build_config.build_plan = self._is_present("build-plan"); + if build_config.build_plan { + config + .cli_unstable() + .fail_if_stable_opt("--build-plan", 5579)?; + }; + + let opts = CompileOptions { + config, + build_config, + features: self._values_of("features"), + all_features: self._is_present("all-features"), + no_default_features: self._is_present("no-default-features"), + spec, + filter: CompileFilter::from_raw_arguments( + self._is_present("lib"), + self._values_of("bin"), + self._is_present("bins"), + self._values_of("test"), + self._is_present("tests"), + self._values_of("example"), + self._is_present("examples"), + self._values_of("bench"), + self._is_present("benches"), + self._is_present("all-targets"), + ), + target_rustdoc_args: None, + target_rustc_args: None, + local_rustdoc_args: None, + export_dir: None, + }; + + if let Some(ws) = workspace { + self.check_optional_opts(ws, &opts)?; + } + + Ok(opts) + } + + fn compile_options_for_single_package<'a>( + &self, + config: &'a Config, + mode: CompileMode, + workspace: Option<&Workspace<'a>>, + ) -> CargoResult> { + let mut compile_opts = self.compile_options(config, mode, workspace)?; + compile_opts.spec = Packages::Packages(self._values_of("package")); + Ok(compile_opts) + } + + fn new_options(&self, config: &Config) -> CargoResult { + let vcs = self._value_of("vcs").map(|vcs| match vcs { + "git" => VersionControl::Git, + "hg" => VersionControl::Hg, + "pijul" => VersionControl::Pijul, + "fossil" => VersionControl::Fossil, + "none" => VersionControl::NoVcs, + vcs => panic!("Impossible vcs: {:?}", vcs), + }); + NewOptions::new( + vcs, + self._is_present("bin"), + self._is_present("lib"), + self.value_of_path("path", config).unwrap(), + self._value_of("name").map(|s| s.to_string()), + self._value_of("edition").map(|s| s.to_string()), + self.registry(config)?, + ) + } + + fn registry(&self, config: &Config) -> CargoResult> { + match 
self._value_of("registry") { + Some(registry) => { + validate_package_name(registry, "registry name", "")?; + + if registry == CRATES_IO_REGISTRY { + // If "crates.io" is specified, then we just need to return `None`, + // as that will cause cargo to use crates.io. This is required + // for the case where a default alternative registry is used + // but the user wants to switch back to crates.io for a single + // command. + Ok(None) + } else { + Ok(Some(registry.to_string())) + } + } + None => config.default_registry(), + } + } + + fn index(&self, config: &Config) -> CargoResult> { + // TODO: deprecated. Remove once it has been decided `--host` can be removed + // We may instead want to repurpose the host flag, as mentioned in issue + // rust-lang/cargo#4208. + let msg = "The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning."; + + let index = match self._value_of("host") { + Some(host) => { + config.shell().warn(&msg)?; + Some(host.to_string()) + } + None => self._value_of("index").map(|s| s.to_string()), + }; + Ok(index) + } + + fn check_optional_opts( + &self, + workspace: &Workspace<'_>, + compile_opts: &CompileOptions<'_>, + ) -> CargoResult<()> { + if self.is_present_with_zero_values("example") { + print_available_examples(workspace, compile_opts)?; + } + + if self.is_present_with_zero_values("bin") { + print_available_binaries(workspace, compile_opts)?; + } + + if self.is_present_with_zero_values("bench") { + print_available_benches(workspace, compile_opts)?; + } + + if self.is_present_with_zero_values("test") { + print_available_tests(workspace, compile_opts)?; + } + + Ok(()) + } + + fn is_present_with_zero_values(&self, name: &str) -> bool { + self._is_present(name) && self._value_of(name).is_none() + } + + fn _value_of(&self, name: &str) -> Option<&str>; + + fn _values_of(&self, name: &str) -> Vec; + + fn _value_of_os(&self, name: &str) -> Option<&OsStr>; + + fn _values_of_os(&self, name: &str) -> Vec; + + fn _is_present(&self, name: &str) -> bool; +} + +impl<'a> ArgMatchesExt for ArgMatches<'a> { + fn _value_of(&self, name: &str) -> Option<&str> { + self.value_of(name) + } + + fn _value_of_os(&self, name: &str) -> Option<&OsStr> { + self.value_of_os(name) + } + + fn _values_of(&self, name: &str) -> Vec { + self.values_of(name) + .unwrap_or_default() + .map(|s| s.to_string()) + .collect() + } + + fn _values_of_os(&self, name: &str) -> Vec { + self.values_of_os(name) + .unwrap_or_default() + .map(|s| s.to_os_string()) + .collect() + } + + fn _is_present(&self, name: &str) -> bool { + self.is_present(name) + } +} + +pub fn values(args: &ArgMatches<'_>, name: &str) -> Vec { + args._values_of(name) +} + +pub fn values_os(args: &ArgMatches<'_>, name: &str) -> Vec { + args._values_of_os(name) +} + +#[derive(PartialEq, PartialOrd, Eq, Ord)] +pub enum CommandInfo { + BuiltIn { name: String, about: Option }, + External { name: String, path: PathBuf }, +} + +impl CommandInfo { + pub fn name(&self) -> &str { + match self { + CommandInfo::BuiltIn { name, .. } => name, + CommandInfo::External { name, .. 
} => name, + } + } +} diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs index 88d0aacc695..fa7b2b2bcb0 100644 --- a/src/cargo/util/config.rs +++ b/src/cargo/util/config.rs @@ -1,113 +1,342 @@ -use std::cell::{RefCell, RefMut, Ref, Cell}; +use std::cell::{RefCell, RefMut}; use std::collections::hash_map::Entry::{Occupied, Vacant}; -use std::collections::hash_map::{HashMap}; +use std::collections::hash_map::HashMap; +use std::collections::HashSet; use std::env; -use std::ffi::OsString; use std::fmt; use std::fs::{self, File}; use std::io::prelude::*; +use std::io::{self, SeekFrom}; use std::mem; use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::sync::Once; +use std::time::Instant; +use std::vec; -use rustc_serialize::{Encodable,Encoder}; -use toml; -use core::{MultiShell, Package}; -use util::{CargoResult, ChainError, Rustc, internal, human}; - -use util::toml as cargo_toml; +use curl::easy::Easy; +use lazycell::LazyCell; +use serde::Deserialize; +use serde::{de, de::IntoDeserializer}; +use url::Url; use self::ConfigValue as CV; +use crate::core::profiles::ConfigProfiles; +use crate::core::shell::Verbosity; +use crate::core::{CliUnstable, Shell, SourceId, Workspace}; +use crate::ops; +use crate::util::errors::{self, internal, CargoResult, CargoResultExt}; +use crate::util::toml as cargo_toml; +use crate::util::Filesystem; +use crate::util::Rustc; +use crate::util::{paths, validate_package_name, FileLock}; +use crate::util::{IntoUrl, IntoUrlWithBase}; +/// Configuration information for cargo. This is not specific to a build, it is information +/// relating to cargo itself. +/// +/// This struct implements `Default`: all fields can be inferred. +#[derive(Debug)] pub struct Config { - home_path: PathBuf, - shell: RefCell, - rustc_info: Rustc, - values: RefCell>, - values_loaded: Cell, + /// The location of the user's 'home' directory. OS-dependent. + home_path: Filesystem, + /// Information about how to write messages to the shell + shell: RefCell, + /// A collection of configuration options + values: LazyCell>, + /// The current working directory of cargo cwd: PathBuf, - rustc: PathBuf, - rustdoc: PathBuf, - target_dir: Option, + /// The location of the cargo executable (path to current process) + cargo_exe: LazyCell, + /// The location of the rustdoc executable + rustdoc: LazyCell, + /// Whether we are printing extra verbose messages + extra_verbose: bool, + /// `frozen` is the same as `locked`, but additionally will not access the + /// network to determine if the lock file is out-of-date. + frozen: bool, + /// `locked` is set if we should not update lock files. If the lock file + /// is missing, or needs to be updated, an error is produced. + locked: bool, + /// `offline` is set if we should never access the network, but otherwise + /// continue operating if possible. + offline: bool, + /// A global static IPC control mechanism (used for managing parallel builds) + jobserver: Option, + /// Cli flags of the form "-Z something" + cli_flags: CliUnstable, + /// A handle on curl easy mode for http calls + easy: LazyCell>, + /// Cache of the `SourceId` for crates.io + crates_io_source_id: LazyCell, + /// If false, don't cache `rustc --version --verbose` invocations + cache_rustc_info: bool, + /// Creation time of this config, used to output the total build time + creation_time: Instant, + /// Target Directory via resolved Cli parameter + target_dir: Option, + /// Environment variables, separated to assist testing. + env: HashMap, + /// Profiles loaded from config. 
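+    /// (As with the other `LazyCell` fields in this struct, the value is
+    /// computed at most once and then cached; a sketch of the pattern, with
+    /// an invented initializer:
+    ///
+    ///     let profiles = cell.try_borrow_with(|| load_profiles())?;
+    ///
+    /// where `load_profiles` only runs on the first access.)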
+ profiles: LazyCell, + /// Tracks which sources have been updated to avoid multiple updates. + updated_sources: LazyCell>>, + /// Lock, if held, of the global package cache along with the number of + /// acquisitions so far. + package_cache_lock: RefCell, usize)>>, } impl Config { - pub fn new(shell: MultiShell) -> CargoResult { - let cwd = try!(env::current_dir().chain_error(|| { - human("couldn't get the current directory of the process") - })); - - let mut cfg = Config { - home_path: try!(homedir(cwd.as_path()).chain_error(|| { - human("Cargo couldn't find your home directory. \ - This probably means that $HOME was not set.") - })), - shell: RefCell::new(shell), - rustc_info: Rustc::blank(), - cwd: cwd, - values: RefCell::new(HashMap::new()), - values_loaded: Cell::new(false), - rustc: PathBuf::from("rustc"), - rustdoc: PathBuf::from("rustdoc"), - target_dir: None, - }; + pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config { + static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _; + static INIT: Once = Once::new(); - try!(cfg.scrape_tool_config()); - try!(cfg.scrape_rustc_version()); - try!(cfg.scrape_target_dir_config()); + // This should be called early on in the process, so in theory the + // unsafety is ok here. (taken ownership of random fds) + INIT.call_once(|| unsafe { + if let Some(client) = jobserver::Client::from_env() { + GLOBAL_JOBSERVER = Box::into_raw(Box::new(client)); + } + }); - Ok(cfg) + let env: HashMap<_, _> = env::vars_os() + .filter_map(|(k, v)| { + // Ignore any key/values that are not valid Unicode. + match (k.into_string(), v.into_string()) { + (Ok(k), Ok(v)) => Some((k, v)), + _ => None, + } + }) + .collect(); + + let cache_rustc_info = match env.get("CARGO_CACHE_RUSTC_INFO") { + Some(cache) => cache != "0", + _ => true, + }; + + Config { + home_path: Filesystem::new(homedir), + shell: RefCell::new(shell), + cwd, + values: LazyCell::new(), + cargo_exe: LazyCell::new(), + rustdoc: LazyCell::new(), + extra_verbose: false, + frozen: false, + locked: false, + offline: false, + jobserver: unsafe { + if GLOBAL_JOBSERVER.is_null() { + None + } else { + Some((*GLOBAL_JOBSERVER).clone()) + } + }, + cli_flags: CliUnstable::default(), + easy: LazyCell::new(), + crates_io_source_id: LazyCell::new(), + cache_rustc_info, + creation_time: Instant::now(), + target_dir: None, + env, + profiles: LazyCell::new(), + updated_sources: LazyCell::new(), + package_cache_lock: RefCell::new(None), + } } - pub fn home(&self) -> &Path { &self.home_path } + pub fn default() -> CargoResult { + let shell = Shell::new(); + let cwd = + env::current_dir().chain_err(|| "couldn't get the current directory of the process")?; + let homedir = homedir(&cwd).ok_or_else(|| { + failure::format_err!( + "Cargo couldn't find your home directory. \ + This probably means that $HOME was not set." + ) + })?; + Ok(Config::new(shell, cwd, homedir)) + } - pub fn git_db_path(&self) -> PathBuf { - self.home_path.join("git").join("db") + /// Gets the user's Cargo home directory (OS-dependent). + pub fn home(&self) -> &Filesystem { + &self.home_path } - pub fn git_checkout_path(&self) -> PathBuf { - self.home_path.join("git").join("checkouts") + /// Gets the Cargo Git directory (`/git`). + pub fn git_path(&self) -> Filesystem { + self.home_path.join("git") } - pub fn registry_index_path(&self) -> PathBuf { + /// Gets the Cargo registry index directory (`/registry/index`). 
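+    /// (For orientation, the path accessors here and below map onto the
+    /// usual `$CARGO_HOME` layout; a sketch assuming the default home
+    /// location:
+    ///
+    ///     ~/.cargo/git/{db,checkouts}
+    ///     ~/.cargo/registry/{index,cache,src}
+    /// )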
+ pub fn registry_index_path(&self) -> Filesystem { self.home_path.join("registry").join("index") } - pub fn registry_cache_path(&self) -> PathBuf { + /// Gets the Cargo registry cache directory (`/registry/path`). + pub fn registry_cache_path(&self) -> Filesystem { self.home_path.join("registry").join("cache") } - pub fn registry_source_path(&self) -> PathBuf { + /// Gets the Cargo registry source directory (`/registry/src`). + pub fn registry_source_path(&self) -> Filesystem { self.home_path.join("registry").join("src") } - pub fn shell(&self) -> RefMut { + /// Gets the default Cargo registry. + pub fn default_registry(&self) -> CargoResult> { + Ok(match self.get_string("registry.default")? { + Some(registry) => Some(registry.val), + None => None, + }) + } + + /// Gets a reference to the shell, e.g., for writing error messages. + pub fn shell(&self) -> RefMut<'_, Shell> { self.shell.borrow_mut() } - pub fn rustc(&self) -> &Path { &self.rustc } + /// Gets the path to the `rustdoc` executable. + pub fn rustdoc(&self) -> CargoResult<&Path> { + self.rustdoc + .try_borrow_with(|| self.get_tool("rustdoc")) + .map(AsRef::as_ref) + } + + /// Gets the path to the `rustc` executable. + pub fn load_global_rustc(&self, ws: Option<&Workspace<'_>>) -> CargoResult { + let cache_location = ws.map(|ws| { + ws.target_dir() + .join(".rustc_info.json") + .into_path_unlocked() + }); + let wrapper = self.maybe_get_tool("rustc_wrapper")?; + Rustc::new( + self.get_tool("rustc")?, + wrapper, + &self + .home() + .join("bin") + .join("rustc") + .into_path_unlocked() + .with_extension(env::consts::EXE_EXTENSION), + if self.cache_rustc_info { + cache_location + } else { + None + }, + ) + } - pub fn rustdoc(&self) -> &Path { &self.rustdoc } + /// Gets the path to the `cargo` executable. + pub fn cargo_exe(&self) -> CargoResult<&Path> { + self.cargo_exe + .try_borrow_with(|| { + fn from_current_exe() -> CargoResult { + // Try fetching the path to `cargo` using `env::current_exe()`. + // The method varies per operating system and might fail; in particular, + // it depends on `/proc` being mounted on Linux, and some environments + // (like containers or chroots) may not have that available. + let exe = env::current_exe()?.canonicalize()?; + Ok(exe) + } - pub fn rustc_info(&self) -> &Rustc { &self.rustc_info } + fn from_argv() -> CargoResult { + // Grab `argv[0]` and attempt to resolve it to an absolute path. + // If `argv[0]` has one component, it must have come from a `PATH` lookup, + // so probe `PATH` in that case. + // Otherwise, it has multiple components and is either: + // - a relative path (e.g., `./cargo`, `target/debug/cargo`), or + // - an absolute path (e.g., `/usr/local/bin/cargo`). + // In either case, `Path::canonicalize` will return the full absolute path + // to the target if it exists. + let argv0 = env::args_os() + .map(PathBuf::from) + .next() + .ok_or_else(|| failure::format_err!("no argv[0]"))?; + paths::resolve_executable(&argv0) + } - pub fn values(&self) -> CargoResult>> { - if !self.values_loaded.get() { - try!(self.load_values()); - self.values_loaded.set(true); + let exe = from_current_exe() + .or_else(|_| from_argv()) + .chain_err(|| "couldn't get the path to cargo executable")?; + Ok(exe) + }) + .map(AsRef::as_ref) + } + + pub fn profiles(&self) -> CargoResult<&ConfigProfiles> { + self.profiles.try_borrow_with(|| { + let ocp = self.get::>("profile")?; + if let Some(config_profiles) = ocp { + // Warn if config profiles without CLI option. 
+ if !self.cli_unstable().config_profile { + self.shell().warn( + "profiles in config files require `-Z config-profile` \ + command-line option", + )?; + return Ok(ConfigProfiles::default()); + } + Ok(config_profiles) + } else { + Ok(ConfigProfiles::default()) + } + }) + } + + pub fn updated_sources(&self) -> RefMut<'_, HashSet> { + self.updated_sources + .borrow_with(|| RefCell::new(HashSet::new())) + .borrow_mut() + } + + pub fn values(&self) -> CargoResult<&HashMap> { + self.values.try_borrow_with(|| self.load_values()) + } + + pub fn values_mut(&mut self) -> CargoResult<&mut HashMap> { + match self.values.borrow_mut() { + Some(map) => Ok(map), + None => failure::bail!("config values not loaded yet"), + } + } + + // Note: this is used by RLS, not Cargo. + pub fn set_values(&self, values: HashMap) -> CargoResult<()> { + if self.values.borrow().is_some() { + failure::bail!("config values already found") + } + match self.values.fill(values) { + Ok(()) => Ok(()), + Err(_) => failure::bail!("could not fill values"), } - Ok(self.values.borrow()) } - pub fn cwd(&self) -> &Path { &self.cwd } + pub fn reload_rooted_at>(&mut self, path: P) -> CargoResult<()> { + let values = self.load_values_from(path.as_ref())?; + self.values.replace(values); + Ok(()) + } - pub fn target_dir(&self, pkg: &Package) -> PathBuf { - self.target_dir.clone().unwrap_or_else(|| { - pkg.root().join("target") - }) + pub fn cwd(&self) -> &Path { + &self.cwd + } + + pub fn target_dir(&self) -> CargoResult> { + if let Some(ref dir) = self.target_dir { + Ok(Some(dir.clone())) + } else if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { + Ok(Some(Filesystem::new(self.cwd.join(dir)))) + } else if let Some(val) = self.get_path("build.target-dir")? { + let val = self.cwd.join(val.val); + Ok(Some(Filesystem::new(val))) + } else { + Ok(None) + } } - pub fn get(&self, key: &str) -> CargoResult> { - let vals = try!(self.values()); + fn get_cv(&self, key: &str) -> CargoResult> { + let vals = self.values()?; let mut parts = key.split('.').enumerate(); let mut val = match vals.get(parts.next().unwrap().1) { Some(val) => val, @@ -121,148 +350,1105 @@ impl Config { None => return Ok(None), } } - CV::Integer(_, ref path) | - CV::String(_, ref path) | - CV::List(_, ref path) | - CV::Boolean(_, ref path) => { - let idx = key.split('.').take(i) - .fold(0, |n, s| n + s.len()) + i - 1; + CV::Integer(_, ref path) + | CV::String(_, ref path) + | CV::List(_, ref path) + | CV::Boolean(_, ref path) => { + let idx = key.split('.').take(i).fold(0, |n, s| n + s.len()) + i - 1; let key_so_far = &key[..idx]; - return Err(human(format!("expected table for configuration \ - key `{}`, but found {} in {}", - key_so_far, val.desc(), - path.display()))); + failure::bail!( + "expected table for configuration key `{}`, \ + but found {} in {}", + key_so_far, + val.desc(), + path.display() + ) } } } Ok(Some(val.clone())) } - pub fn get_string(&self, key: &str) -> CargoResult> { - match try!(self.get(key)) { - Some(CV::String(i, path)) => Ok(Some((i, path))), - Some(val) => self.expected("string", key, val), + // Helper primarily for testing. 
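+    // For instance, a test might inject a fake environment and have
+    // `build.jobs` resolve through it (sketch; `config` assumed to be a
+    // `Config` built for the test):
+    //
+    //     let mut env = HashMap::new();
+    //     env.insert("CARGO_BUILD_JOBS".to_string(), "2".to_string());
+    //     config.set_env(env);
+    //
+    // since config key `build.jobs` maps to env key `CARGO_BUILD_JOBS`
+    // via `ConfigKey::to_env` below.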
+    pub fn set_env(&mut self, env: HashMap<String, String>) {
+        self.env = env;
+    }
+
+    fn get_env<T>(&self, key: &ConfigKey) -> Result<OptValue<T>, ConfigError>
+    where
+        T: FromStr,
+        <T as FromStr>::Err: fmt::Display,
+    {
+        let key = key.to_env();
+        match self.env.get(&key) {
+            Some(value) => {
+                let definition = Definition::Environment(key);
+                Ok(Some(Value {
+                    val: value
+                        .parse()
+                        .map_err(|e| ConfigError::new(format!("{}", e), definition.clone()))?,
+                    definition,
+                }))
+            }
             None => Ok(None),
         }
     }

-    pub fn get_path(&self, key: &str) -> CargoResult<Option<PathBuf>> {
-        if let Some((specified_path, path_to_config)) = try!(self.get_string(&key)) {
-            if specified_path.contains("/") || (cfg!(windows) && specified_path.contains("\\")) {
-                // An absolute or a relative path
-                let prefix_path = path_to_config.parent().unwrap().parent().unwrap();
-                // Joining an absolute path to any path results in the given absolute path
-                Ok(Some(prefix_path.join(specified_path)))
-            } else {
-                // A pathless name
-                Ok(Some(PathBuf::from(specified_path)))
+    fn has_key(&self, key: &ConfigKey) -> bool {
+        let env_key = key.to_env();
+        if self.env.get(&env_key).is_some() {
+            return true;
+        }
+        let env_pattern = format!("{}_", env_key);
+        if self.env.keys().any(|k| k.starts_with(&env_pattern)) {
+            return true;
+        }
+        if let Ok(o_cv) = self.get_cv(&key.to_config()) {
+            if o_cv.is_some() {
+                return true;
+            }
+        }
+        false
+    }
+
+    pub fn get_string(&self, key: &str) -> CargoResult<OptValue<String>> {
+        self.get_string_priv(&ConfigKey::from_str(key))
+            .map_err(|e| e.into())
+    }
+
+    fn get_string_priv(&self, key: &ConfigKey) -> Result<OptValue<String>, ConfigError> {
+        match self.get_env(key)? {
+            Some(v) => Ok(Some(v)),
+            None => {
+                let config_key = key.to_config();
+                let o_cv = self.get_cv(&config_key)?;
+                match o_cv {
+                    Some(CV::String(s, path)) => Ok(Some(Value {
+                        val: s,
+                        definition: Definition::Path(path),
+                    })),
+                    Some(cv) => Err(ConfigError::expected(&config_key, "a string", &cv)),
+                    None => Ok(None),
+                }
             }
+        }
+    }
+
+    pub fn get_bool(&self, key: &str) -> CargoResult<OptValue<bool>> {
+        self.get_bool_priv(&ConfigKey::from_str(key))
+            .map_err(|e| e.into())
+    }
+
+    fn get_bool_priv(&self, key: &ConfigKey) -> Result<OptValue<bool>, ConfigError> {
+        match self.get_env(key)? {
+            Some(v) => Ok(Some(v)),
+            None => {
+                let config_key = key.to_config();
+                let o_cv = self.get_cv(&config_key)?;
+                match o_cv {
+                    Some(CV::Boolean(b, path)) => Ok(Some(Value {
+                        val: b,
+                        definition: Definition::Path(path),
+                    })),
+                    Some(cv) => Err(ConfigError::expected(&config_key, "true/false", &cv)),
+                    None => Ok(None),
+                }
+            }
+        }
+    }
+
+    fn string_to_path(&self, value: String, definition: &Definition) -> PathBuf {
+        let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\'));
+        if is_path {
+            definition.root(self).join(value)
+        } else {
+            // A pathless name.
+            PathBuf::from(value)
+        }
+    }
+
+    pub fn get_path(&self, key: &str) -> CargoResult<OptValue<PathBuf>> {
+        if let Some(val) = self.get_string(key)? {
+            Ok(Some(Value {
+                val: self.string_to_path(val.val, &val.definition),
+                definition: val.definition,
+            }))
         } else {
             Ok(None)
         }
     }

-    pub fn get_list(&self, key: &str) -> CargoResult<Option<(Vec<(String, PathBuf)>, PathBuf)>> {
-        match try!(self.get(key)) {
-            Some(CV::List(i, path)) => Ok(Some((i, path))),
-            Some(val) => self.expected("list", key, val),
+    pub fn get_path_and_args(&self, key: &str) -> CargoResult<OptValue<(PathBuf, Vec<String>)>> {
+        if let Some(mut val) = self.get_list_or_split_string(key)? {
+            if !val.val.is_empty() {
+                return Ok(Some(Value {
+                    val: (
+                        self.string_to_path(val.val.remove(0), &val.definition),
+                        val.val,
+                    ),
+                    definition: val.definition,
+                }));
+            }
+        }
+        Ok(None)
+    }
+
+    // NOTE: this does **not** support environment variables. Use `get` instead
+    // if you want that.
+    pub fn get_list(&self, key: &str) -> CargoResult<OptValue<Vec<(String, PathBuf)>>> {
+        match self.get_cv(key)? {
+            Some(CV::List(i, path)) => Ok(Some(Value {
+                val: i,
+                definition: Definition::Path(path),
+            })),
+            Some(val) => self.expected("list", key, &val),
             None => Ok(None),
         }
     }

-    pub fn get_table(&self, key: &str)
-                     -> CargoResult<Option<(HashMap<String, ConfigValue>, PathBuf)>> {
-        match try!(self.get(key)) {
-            Some(CV::Table(i, path)) => Ok(Some((i, path))),
-            Some(val) => self.expected("table", key, val),
+    pub fn get_list_or_split_string(&self, key: &str) -> CargoResult<OptValue<Vec<String>>> {
+        if let Some(value) = self.get_env::<String>(&ConfigKey::from_str(key))? {
+            return Ok(Some(Value {
+                val: value.val.split(' ').map(str::to_string).collect(),
+                definition: value.definition,
+            }));
+        }
+
+        match self.get_cv(key)? {
+            Some(CV::List(i, path)) => Ok(Some(Value {
+                val: i.into_iter().map(|(s, _)| s).collect(),
+                definition: Definition::Path(path),
+            })),
+            Some(CV::String(i, path)) => Ok(Some(Value {
+                val: i.split(' ').map(str::to_string).collect(),
+                definition: Definition::Path(path),
+            })),
+            Some(val) => self.expected("list or string", key, &val),
             None => Ok(None),
         }
     }

-    pub fn get_i64(&self, key: &str) -> CargoResult<Option<(i64, PathBuf)>> {
-        match try!(self.get(key)) {
-            Some(CV::Integer(i, path)) => Ok(Some((i, path))),
-            Some(val) => self.expected("integer", key, val),
+    pub fn get_table(&self, key: &str) -> CargoResult<OptValue<HashMap<String, ConfigValue>>> {
+        match self.get_cv(key)? {
+            Some(CV::Table(i, path)) => Ok(Some(Value {
+                val: i,
+                definition: Definition::Path(path),
+            })),
+            Some(val) => self.expected("table", key, &val),
             None => Ok(None),
         }
     }

-    pub fn expected<T>(&self, ty: &str, key: &str, val: CV) -> CargoResult<T> {
-        val.expected(ty).map_err(|e| {
-            human(format!("invalid configuration for key `{}`\n{}", key, e))
-        })
+    // Recommended to use `get` if you want a specific type, such as an unsigned value.
+    // Example: `config.get::<Option<u32>>("some.key")?`.
+    pub fn get_i64(&self, key: &str) -> CargoResult<OptValue<i64>> {
+        self.get_integer(&ConfigKey::from_str(key))
+            .map_err(|e| e.into())
+    }
+
+    fn get_integer(&self, key: &ConfigKey) -> Result<OptValue<i64>, ConfigError> {
+        let config_key = key.to_config();
+        match self.get_env::<i64>(key)? {
+            Some(v) => Ok(Some(v)),
+            None => match self.get_cv(&config_key)? {
+                Some(CV::Integer(i, path)) => Ok(Some(Value {
+                    val: i,
+                    definition: Definition::Path(path),
+                })),
+                Some(cv) => Err(ConfigError::expected(&config_key, "an integer", &cv)),
+                None => Ok(None),
+            },
+        }
+    }
+
+    fn expected<T>(&self, ty: &str, key: &str, val: &CV) -> CargoResult<T> {
+        val.expected(ty, key)
+            .map_err(|e| failure::format_err!("invalid configuration for key `{}`\n{}", key, e))
+    }
+
+    pub fn configure(
+        &mut self,
+        verbose: u32,
+        quiet: Option<bool>,
+        color: &Option<String>,
+        frozen: bool,
+        locked: bool,
+        offline: bool,
+        target_dir: &Option<PathBuf>,
+        unstable_flags: &[String],
+    ) -> CargoResult<()> {
+        let extra_verbose = verbose >= 2;
+        let verbose = if verbose == 0 { None } else { Some(true) };
+
+        // Ignore errors in the configuration files.
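+        // The net effect of the match below: the command line wins, so `-v`
+        // beats `term.verbose = false`, and the config values only decide
+        // the verbosity when neither `-v` nor `-q` was passed.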
+        let cfg_verbose = self.get_bool("term.verbose").unwrap_or(None).map(|v| v.val);
+        let cfg_color = self.get_string("term.color").unwrap_or(None).map(|v| v.val);
+
+        let color = color.as_ref().or_else(|| cfg_color.as_ref());
+
+        let verbosity = match (verbose, cfg_verbose, quiet) {
+            (Some(true), _, None) | (None, Some(true), None) => Verbosity::Verbose,
+
+            // Command line takes precedence over configuration, so ignore the
+            // configuration.
+            (None, _, Some(true)) => Verbosity::Quiet,
+
+            // Can't pass both at the same time on the command line regardless
+            // of configuration.
+            (Some(true), _, Some(true)) => {
+                failure::bail!("cannot set both --verbose and --quiet");
+            }
+
+            // Can't actually get `Some(false)` as a value from the command
+            // line, so just ignore them here to appease exhaustiveness checking
+            // in match statements.
+            (Some(false), _, _)
+            | (_, _, Some(false))
+            | (None, Some(false), None)
+            | (None, None, None) => Verbosity::Normal,
+        };
+
+        let cli_target_dir = match target_dir.as_ref() {
+            Some(dir) => Some(Filesystem::new(dir.clone())),
+            None => None,
+        };
+
+        self.shell().set_verbosity(verbosity);
+        self.shell().set_color_choice(color.map(|s| &s[..]))?;
+        self.extra_verbose = extra_verbose;
+        self.frozen = frozen;
+        self.locked = locked;
+        self.offline = offline
+            || self
+                .get::<Option<bool>>("net.offline")
+                .unwrap_or(None)
+                .unwrap_or(false);
+        self.target_dir = cli_target_dir;
+        self.cli_flags.parse(unstable_flags)?;
+
+        Ok(())
+    }

-    fn load_values(&self) -> CargoResult<()> {
+    pub fn cli_unstable(&self) -> &CliUnstable {
+        &self.cli_flags
+    }
+
+    pub fn extra_verbose(&self) -> bool {
+        self.extra_verbose
+    }
+
+    pub fn network_allowed(&self) -> bool {
+        !self.frozen() && !self.offline()
+    }
+
+    pub fn offline(&self) -> bool {
+        self.offline
+    }
+
+    pub fn frozen(&self) -> bool {
+        self.frozen
+    }
+
+    pub fn lock_update_allowed(&self) -> bool {
+        !self.frozen && !self.locked
+    }
+
+    /// Loads configuration from the filesystem.
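+    /// (Files are discovered by walking up from the current directory and
+    /// finishing with `$CARGO_HOME`, and a key found closer to the project
+    /// wins on merge; so, assuming this layout,
+    ///
+    ///     /project/.cargo/config
+    ///     ~/.cargo/config
+    ///
+    /// a key set in both resolves to the `/project` value.)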
+ pub fn load_values(&self) -> CargoResult> { + self.load_values_from(&self.cwd) + } + + fn load_values_from(&self, path: &Path) -> CargoResult> { let mut cfg = CV::Table(HashMap::new(), PathBuf::from(".")); + let home = self.home_path.clone().into_path_unlocked(); - try!(walk_tree(&self.cwd, |mut file, path| { + walk_tree(path, &home, |path| { let mut contents = String::new(); - try!(file.read_to_string(&mut contents)); - let table = try!(cargo_toml::parse(&contents, &path).chain_error(|| { - human(format!("could not parse TOML configuration in `{}`", - path.display())) - })); - let toml = toml::Value::Table(table); - let value = try!(CV::from_toml(&path, toml).chain_error(|| { - human(format!("failed to load TOML configuration from `{}`", - path.display())) - })); - try!(cfg.merge(value)); + let mut file = File::open(&path)?; + file.read_to_string(&mut contents) + .chain_err(|| format!("failed to read configuration file `{}`", path.display()))?; + let toml = cargo_toml::parse(&contents, path, self).chain_err(|| { + format!("could not parse TOML configuration in `{}`", path.display()) + })?; + let value = CV::from_toml(path, toml).chain_err(|| { + format!( + "failed to load TOML configuration from `{}`", + path.display() + ) + })?; + cfg.merge(value) + .chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?; Ok(()) - }).chain_error(|| human("Couldn't load Cargo configuration"))); - + }) + .chain_err(|| "could not load Cargo configuration")?; - *self.values.borrow_mut() = match cfg { - CV::Table(map, _) => map, + self.load_credentials(&mut cfg)?; + match cfg { + CV::Table(map, _) => Ok(map), _ => unreachable!(), - }; - Ok(()) + } } - fn scrape_tool_config(&mut self) -> CargoResult<()> { - self.rustc = try!(self.get_tool("rustc")); - self.rustdoc = try!(self.get_tool("rustdoc")); - Ok(()) + /// Gets the index for a registry. + pub fn get_registry_index(&self, registry: &str) -> CargoResult { + validate_package_name(registry, "registry name", "")?; + Ok( + match self.get_string(&format!("registries.{}.index", registry))? { + Some(index) => self.resolve_registry_index(index)?, + None => failure::bail!("No index found for registry: `{}`", registry), + }, + ) } - fn scrape_rustc_version(&mut self) -> CargoResult<()> { - self.rustc_info = try!(Rustc::new(&self.rustc)); - Ok(()) + /// Gets the index for the default registry. + pub fn get_default_registry_index(&self) -> CargoResult> { + Ok(match self.get_string("registry.index")? { + Some(index) => Some(self.resolve_registry_index(index)?), + None => None, + }) } - fn scrape_target_dir_config(&mut self) -> CargoResult<()> { - if let Some((dir, dir2)) = try!(self.get_string("build.target-dir")) { - let mut path = PathBuf::from(dir2); - path.pop(); - path.pop(); - path.push(dir); - self.target_dir = Some(path); - } else if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { - self.target_dir = Some(self.cwd.join(dir)); + fn resolve_registry_index(&self, index: Value) -> CargoResult { + let base = index + .definition + .root(self) + .join("truncated-by-url_with_base"); + // Parse val to check it is a URL, not a relative path without a protocol. + let _parsed = index.val.into_url()?; + let url = index.val.into_url_with_base(Some(&*base))?; + if url.password().is_some() { + failure::bail!("Registry URLs may not contain passwords"); + } + Ok(url) + } + + /// Loads credentials config from the credentials file into the `ConfigValue` object, if + /// present. 
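+    /// (Both the legacy flat layout and the current sectioned layout are
+    /// accepted; sketched below with a placeholder token:
+    ///
+    ///     token = "..."        # legacy `~/.cargo/credentials`
+    ///
+    ///     [registry]
+    ///     token = "..."        # what the compatibility shim rewrites it to
+    /// )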
+ fn load_credentials(&self, cfg: &mut ConfigValue) -> CargoResult<()> { + let home_path = self.home_path.clone().into_path_unlocked(); + let credentials = home_path.join("credentials"); + if fs::metadata(&credentials).is_err() { + return Ok(()); + } + + let mut contents = String::new(); + let mut file = File::open(&credentials)?; + file.read_to_string(&mut contents).chain_err(|| { + format!( + "failed to read configuration file `{}`", + credentials.display() + ) + })?; + + let toml = cargo_toml::parse(&contents, &credentials, self).chain_err(|| { + format!( + "could not parse TOML configuration in `{}`", + credentials.display() + ) + })?; + + let mut value = CV::from_toml(&credentials, toml).chain_err(|| { + format!( + "failed to load TOML configuration from `{}`", + credentials.display() + ) + })?; + + // Backwards compatibility for old `.cargo/credentials` layout. + { + let value = match value { + CV::Table(ref mut value, _) => value, + _ => unreachable!(), + }; + + if let Some(token) = value.remove("token") { + if let Vacant(entry) = value.entry("registry".into()) { + let mut map = HashMap::new(); + map.insert("token".into(), token); + let table = CV::Table(map, PathBuf::from(".")); + entry.insert(table); + } + } } + + // We want value to override `cfg`, so swap these. + mem::swap(cfg, &mut value); + cfg.merge(value)?; + Ok(()) } - fn get_tool(&self, tool: &str) -> CargoResult { + /// Looks for a path for `tool` in an environment variable or config path, and returns `None` + /// if it's not present. + fn maybe_get_tool(&self, tool: &str) -> CargoResult> { + let var = tool + .chars() + .flat_map(|c| c.to_uppercase()) + .collect::(); + if let Some(tool_path) = env::var_os(&var) { + let maybe_relative = match tool_path.to_str() { + Some(s) => s.contains('/') || s.contains('\\'), + None => false, + }; + let path = if maybe_relative { + self.cwd.join(tool_path) + } else { + PathBuf::from(tool_path) + }; + return Ok(Some(path)); + } + let var = format!("build.{}", tool); - if let Some(tool_path) = try!(self.get_path(&var)) { - return Ok(tool_path); + if let Some(tool_path) = self.get_path(&var)? { + return Ok(Some(tool_path.val)); } - let var = tool.chars().flat_map(|c| c.to_uppercase()).collect::(); - let tool = env::var_os(&var).unwrap_or_else(|| OsString::from(tool)); - Ok(PathBuf::from(tool)) + Ok(None) + } + + /// Looks for a path for `tool` in an environment variable or config path, defaulting to `tool` + /// as a path. + pub fn get_tool(&self, tool: &str) -> CargoResult { + self.maybe_get_tool(tool) + .map(|t| t.unwrap_or_else(|| PathBuf::from(tool))) + } + + pub fn jobserver_from_env(&self) -> Option<&jobserver::Client> { + self.jobserver.as_ref() + } + + pub fn http(&self) -> CargoResult<&RefCell> { + let http = self + .easy + .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?; + { + let mut http = http.borrow_mut(); + http.reset(); + let timeout = ops::configure_http_handle(self, &mut http)?; + timeout.configure(&mut http)?; + } + Ok(http) + } + + pub fn crates_io_source_id(&self, f: F) -> CargoResult + where + F: FnMut() -> CargoResult, + { + Ok(*(self.crates_io_source_id.try_borrow_with(f)?)) + } + + pub fn creation_time(&self) -> Instant { + self.creation_time } + + // Retrieves a config variable. + // + // This supports most serde `Deserialize` types. 
Examples: + // + // let v: Option = config.get("some.nested.key")?; + // let v: Option = config.get("some.key")?; + // let v: Option> = config.get("foo")?; + pub fn get<'de, T: de::Deserialize<'de>>(&self, key: &str) -> CargoResult { + let d = Deserializer { + config: self, + key: ConfigKey::from_str(key), + }; + T::deserialize(d).map_err(|e| e.into()) + } + + pub fn assert_package_cache_locked<'a>(&self, f: &'a Filesystem) -> &'a Path { + let ret = f.as_path_unlocked(); + assert!( + self.package_cache_lock.borrow().is_some(), + "package cache lock is not currently held, Cargo forgot to call \ + `acquire_package_cache_lock` before we got to this stack frame", + ); + assert!(ret.starts_with(self.home_path.as_path_unlocked())); + ret + } + + /// Acquires an exclusive lock on the global "package cache" + /// + /// This lock is global per-process and can be acquired recursively. An RAII + /// structure is returned to release the lock, and if this process + /// abnormally terminates the lock is also released. + pub fn acquire_package_cache_lock(&self) -> CargoResult> { + let mut slot = self.package_cache_lock.borrow_mut(); + match *slot { + // We've already acquired the lock in this process, so simply bump + // the count and continue. + Some((_, ref mut cnt)) => { + *cnt += 1; + } + None => { + let path = ".package-cache"; + let desc = "package cache"; + + // First, attempt to open an exclusive lock which is in general + // the purpose of this lock! + // + // If that fails because of a readonly filesystem or a + // permission error, though, then we don't really want to fail + // just because of this. All files that this lock protects are + // in subfolders, so they're assumed by Cargo to also be + // readonly or have invalid permissions for us to write to. If + // that's the case, then we don't really need to grab a lock in + // the first place here. + // + // Despite this we attempt to grab a readonly lock. This means + // that if our read-only folder is shared read-write with + // someone else on the system we should synchronize with them, + // but if we can't even do that then we did our best and we just + // keep on chugging elsewhere. + match self.home_path.open_rw(path, self, desc) { + Ok(lock) => *slot = Some((Some(lock), 1)), + Err(e) => { + if maybe_readonly(&e) { + let lock = self.home_path.open_ro(path, self, desc).ok(); + *slot = Some((lock, 1)); + return Ok(PackageCacheLock(self)); + } + + Err(e).chain_err(|| "failed to acquire package cache lock")?; + } + } + } + } + return Ok(PackageCacheLock(self)); + + fn maybe_readonly(err: &failure::Error) -> bool { + err.iter_chain().any(|err| { + if let Some(io) = err.downcast_ref::() { + if io.kind() == io::ErrorKind::PermissionDenied { + return true; + } + + #[cfg(unix)] + return io.raw_os_error() == Some(libc::EROFS); + } + + false + }) + } + } + + pub fn release_package_cache_lock(&self) {} } -#[derive(Eq, PartialEq, Clone, RustcEncodable, RustcDecodable, Copy)] -pub enum Location { - Project, - Global +/// A segment of a config key. +/// +/// Config keys are split on dots for regular keys, or underscores for +/// environment keys. +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +enum ConfigKeyPart { + /// Case-insensitive part (checks uppercase in environment keys). + Part(String), + /// Case-sensitive part (environment keys must match exactly). 
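+    /// (For example, with `-Z advanced-env`, a key such as
+    /// `CARGO_PROFILE_DEV_OVERRIDES_foo_OPT_LEVEL` mixes both kinds: the
+    /// `foo` segment is a `CasePart` that must keep its exact case, while
+    /// the surrounding `Part` segments are matched uppercased. Key shape
+    /// taken from the map-access code later in this file.)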
+ CasePart(String), } -#[derive(Eq,PartialEq,Clone,RustcDecodable)] +impl ConfigKeyPart { + fn to_env(&self) -> String { + match self { + ConfigKeyPart::Part(s) => s.replace("-", "_").to_uppercase(), + ConfigKeyPart::CasePart(s) => s.clone(), + } + } + + fn to_config(&self) -> String { + match self { + ConfigKeyPart::Part(s) => s.clone(), + ConfigKeyPart::CasePart(s) => s.clone(), + } + } +} + +/// Key for a configuration variable. +#[derive(Debug, Clone)] +struct ConfigKey(Vec); + +impl ConfigKey { + fn from_str(key: &str) -> ConfigKey { + ConfigKey( + key.split('.') + .map(|p| ConfigKeyPart::Part(p.to_string())) + .collect(), + ) + } + + fn join(&self, next: ConfigKeyPart) -> ConfigKey { + let mut res = self.clone(); + res.0.push(next); + res + } + + fn to_env(&self) -> String { + format!( + "CARGO_{}", + self.0 + .iter() + .map(|p| p.to_env()) + .collect::>() + .join("_") + ) + } + + fn to_config(&self) -> String { + self.0 + .iter() + .map(|p| p.to_config()) + .collect::>() + .join(".") + } + + fn last(&self) -> &ConfigKeyPart { + self.0.last().unwrap() + } +} + +impl fmt::Display for ConfigKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.to_config().fmt(f) + } +} + +/// Internal error for serde errors. +#[derive(Debug)] +pub struct ConfigError { + error: failure::Error, + definition: Option, +} + +impl ConfigError { + fn new(message: String, definition: Definition) -> ConfigError { + ConfigError { + error: failure::err_msg(message), + definition: Some(definition), + } + } + + fn expected(key: &str, expected: &str, found: &ConfigValue) -> ConfigError { + ConfigError { + error: failure::format_err!( + "`{}` expected {}, but found a {}", + key, + expected, + found.desc() + ), + definition: Some(Definition::Path(found.definition_path().to_path_buf())), + } + } + + fn missing(key: &str) -> ConfigError { + ConfigError { + error: failure::format_err!("missing config key `{}`", key), + definition: None, + } + } + + fn with_key_context(self, key: &str, definition: Definition) -> ConfigError { + ConfigError { + error: failure::format_err!("could not load config key `{}`: {}", key, self), + definition: Some(definition), + } + } +} + +impl std::error::Error for ConfigError {} + +// Future note: currently, we cannot override `Fail::cause` (due to +// specialization) so we have no way to return the underlying causes. In the +// future, once this limitation is lifted, this should instead implement +// `cause` and avoid doing the cause formatting here. +impl fmt::Display for ConfigError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let message = errors::display_causes(&self.error); + if let Some(ref definition) = self.definition { + write!(f, "error in {}: {}", definition, message) + } else { + message.fmt(f) + } + } +} + +impl de::Error for ConfigError { + fn custom(msg: T) -> Self { + ConfigError { + error: failure::err_msg(msg.to_string()), + definition: None, + } + } +} + +impl From for ConfigError { + fn from(error: failure::Error) -> Self { + ConfigError { + error, + definition: None, + } + } +} + +/// Serde deserializer used to convert config values to a target type using +/// `Config::get`. +pub struct Deserializer<'config> { + config: &'config Config, + key: ConfigKey, +} + +macro_rules! 
deserialize_method { + ($method:ident, $visit:ident, $getter:ident) => { + fn $method(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + let v = self.config.$getter(&self.key)?.ok_or_else(|| + ConfigError::missing(&self.key.to_config()))?; + let Value{val, definition} = v; + let res: Result = visitor.$visit(val); + res.map_err(|e| e.with_key_context(&self.key.to_config(), definition)) + } + } +} + +impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> { + type Error = ConfigError; + + fn deserialize_any(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + // Future note: If you ever need to deserialize a non-self describing + // map type, this should implement a starts_with check (similar to how + // ConfigMapAccess does). + if let Some(v) = self.config.env.get(&self.key.to_env()) { + let res: Result = if v == "true" || v == "false" { + visitor.visit_bool(v.parse().unwrap()) + } else if let Ok(v) = v.parse::() { + visitor.visit_i64(v) + } else if self.config.cli_unstable().advanced_env + && v.starts_with('[') + && v.ends_with(']') + { + visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?) + } else { + visitor.visit_string(v.clone()) + }; + return res.map_err(|e| { + e.with_key_context( + &self.key.to_config(), + Definition::Environment(self.key.to_env()), + ) + }); + } + + let o_cv = self.config.get_cv(&self.key.to_config())?; + if let Some(cv) = o_cv { + let res: (Result, PathBuf) = match cv { + CV::Integer(i, path) => (visitor.visit_i64(i), path), + CV::String(s, path) => (visitor.visit_string(s), path), + CV::List(_, path) => ( + visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?), + path, + ), + CV::Table(_, path) => ( + visitor.visit_map(ConfigMapAccess::new_map(self.config, self.key.clone())?), + path, + ), + CV::Boolean(b, path) => (visitor.visit_bool(b), path), + }; + let (res, path) = res; + return res + .map_err(|e| e.with_key_context(&self.key.to_config(), Definition::Path(path))); + } + Err(ConfigError::missing(&self.key.to_config())) + } + + deserialize_method!(deserialize_bool, visit_bool, get_bool_priv); + deserialize_method!(deserialize_i8, visit_i64, get_integer); + deserialize_method!(deserialize_i16, visit_i64, get_integer); + deserialize_method!(deserialize_i32, visit_i64, get_integer); + deserialize_method!(deserialize_i64, visit_i64, get_integer); + deserialize_method!(deserialize_u8, visit_i64, get_integer); + deserialize_method!(deserialize_u16, visit_i64, get_integer); + deserialize_method!(deserialize_u32, visit_i64, get_integer); + deserialize_method!(deserialize_u64, visit_i64, get_integer); + deserialize_method!(deserialize_string, visit_string, get_string_priv); + + fn deserialize_option(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + if self.config.has_key(&self.key) { + visitor.visit_some(self) + } else { + // Treat missing values as `None`. + visitor.visit_none() + } + } + + fn deserialize_struct( + self, + _name: &'static str, + fields: &'static [&'static str], + visitor: V, + ) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_map(ConfigMapAccess::new_struct(self.config, self.key, fields)?) + } + + fn deserialize_map(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_map(ConfigMapAccess::new_map(self.config, self.key)?) + } + + fn deserialize_seq(self, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?) 
+ } + + fn deserialize_tuple(self, _len: usize, visitor: V) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?) + } + + fn deserialize_tuple_struct( + self, + _name: &'static str, + _len: usize, + visitor: V, + ) -> Result + where + V: de::Visitor<'de>, + { + visitor.visit_seq(ConfigSeqAccess::new(self.config, &self.key)?) + } + + fn deserialize_newtype_struct( + self, + name: &'static str, + visitor: V, + ) -> Result + where + V: de::Visitor<'de>, + { + if name == "ConfigRelativePath" { + match self.config.get_string_priv(&self.key)? { + Some(v) => { + let path = v + .definition + .root(self.config) + .join(v.val) + .display() + .to_string(); + visitor.visit_newtype_struct(path.into_deserializer()) + } + None => Err(ConfigError::missing(&self.key.to_config())), + } + } else { + visitor.visit_newtype_struct(self) + } + } + + // These aren't really supported, yet. + serde::forward_to_deserialize_any! { + f32 f64 char str bytes + byte_buf unit unit_struct + enum identifier ignored_any + } +} + +struct ConfigMapAccess<'config> { + config: &'config Config, + key: ConfigKey, + set_iter: as IntoIterator>::IntoIter, + next: Option, +} + +impl<'config> ConfigMapAccess<'config> { + fn new_map( + config: &'config Config, + key: ConfigKey, + ) -> Result, ConfigError> { + let mut set = HashSet::new(); + if let Some(mut v) = config.get_table(&key.to_config())? { + // `v: Value>` + for (key, _value) in v.val.drain() { + set.insert(ConfigKeyPart::CasePart(key)); + } + } + if config.cli_unstable().advanced_env { + // `CARGO_PROFILE_DEV_OVERRIDES_` + let env_pattern = format!("{}_", key.to_env()); + for env_key in config.env.keys() { + if env_key.starts_with(&env_pattern) { + // `CARGO_PROFILE_DEV_OVERRIDES_bar_OPT_LEVEL = 3` + let rest = &env_key[env_pattern.len()..]; + // `rest = bar_OPT_LEVEL` + let part = rest.splitn(2, '_').next().unwrap(); + // `part = "bar"` + set.insert(ConfigKeyPart::CasePart(part.to_string())); + } + } + } + Ok(ConfigMapAccess { + config, + key, + set_iter: set.into_iter(), + next: None, + }) + } + + fn new_struct( + config: &'config Config, + key: ConfigKey, + fields: &'static [&'static str], + ) -> Result, ConfigError> { + let mut set = HashSet::new(); + for field in fields { + set.insert(ConfigKeyPart::Part(field.to_string())); + } + if let Some(mut v) = config.get_table(&key.to_config())? 
{ + for (t_key, value) in v.val.drain() { + let part = ConfigKeyPart::Part(t_key); + if !set.contains(&part) { + config.shell().warn(format!( + "unused key `{}` in config file `{}`", + key.join(part).to_config(), + value.definition_path().display() + ))?; + } + } + } + Ok(ConfigMapAccess { + config, + key, + set_iter: set.into_iter(), + next: None, + }) + } +} + +impl<'de, 'config> de::MapAccess<'de> for ConfigMapAccess<'config> { + type Error = ConfigError; + + fn next_key_seed(&mut self, seed: K) -> Result, Self::Error> + where + K: de::DeserializeSeed<'de>, + { + match self.set_iter.next() { + Some(key) => { + let de_key = key.to_config(); + self.next = Some(key); + seed.deserialize(de_key.into_deserializer()).map(Some) + } + None => Ok(None), + } + } + + fn next_value_seed(&mut self, seed: V) -> Result + where + V: de::DeserializeSeed<'de>, + { + let next_key = self.next.take().expect("next field missing"); + let next_key = self.key.join(next_key); + seed.deserialize(Deserializer { + config: self.config, + key: next_key, + }) + } +} + +struct ConfigSeqAccess { + list_iter: vec::IntoIter<(String, Definition)>, +} + +impl ConfigSeqAccess { + fn new(config: &Config, key: &ConfigKey) -> Result { + let mut res = Vec::new(); + if let Some(v) = config.get_list(&key.to_config())? { + for (s, path) in v.val { + res.push((s, Definition::Path(path))); + } + } + + if config.cli_unstable().advanced_env { + // Parse an environment string as a TOML array. + let env_key = key.to_env(); + let def = Definition::Environment(env_key.clone()); + if let Some(v) = config.env.get(&env_key) { + if !(v.starts_with('[') && v.ends_with(']')) { + return Err(ConfigError::new( + format!("should have TOML list syntax, found `{}`", v), + def, + )); + } + let temp_key = key.last().to_env(); + let toml_s = format!("{}={}", temp_key, v); + let toml_v: toml::Value = toml::de::from_str(&toml_s).map_err(|e| { + ConfigError::new(format!("could not parse TOML list: {}", e), def.clone()) + })?; + let values = toml_v + .as_table() + .unwrap() + .get(&temp_key) + .unwrap() + .as_array() + .expect("env var was not array"); + for value in values { + // TODO: support other types. + let s = value.as_str().ok_or_else(|| { + ConfigError::new( + format!("expected string, found {}", value.type_str()), + def.clone(), + ) + })?; + res.push((s.to_string(), def.clone())); + } + } + } + Ok(ConfigSeqAccess { + list_iter: res.into_iter(), + }) + } +} + +impl<'de> de::SeqAccess<'de> for ConfigSeqAccess { + type Error = ConfigError; + + fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> + where + T: de::DeserializeSeed<'de>, + { + match self.list_iter.next() { + // TODO: add `def` to error? + Some((value, _def)) => seed.deserialize(value.into_deserializer()).map(Some), + None => Ok(None), + } + } +} + +/// Use with the `get` API to fetch a string that will be converted to a +/// `PathBuf`. Relative paths are converted to absolute paths based on the +/// location of the config file. 
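+/// (A hedged usage sketch; the key and the expectation that it holds a
+/// path are illustrative only:
+///
+///     let p: Option<ConfigRelativePath> = config.get("some.path.key")?;
+///
+/// Deserialization goes through `deserialize_newtype_struct` above, which
+/// joins the raw string onto the directory of the config file that
+/// defined it.)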
+#[derive(Debug, Eq, PartialEq, Clone, Deserialize)] +pub struct ConfigRelativePath(PathBuf); + +impl ConfigRelativePath { + pub fn path(self) -> PathBuf { + self.0 + } +} + +#[derive(Eq, PartialEq, Clone)] pub enum ConfigValue { Integer(i64, PathBuf), String(String, PathBuf), @@ -271,20 +1457,32 @@ pub enum ConfigValue { Boolean(bool, PathBuf), } +pub struct Value { + pub val: T, + pub definition: Definition, +} + +pub type OptValue = Option>; + +#[derive(Clone, Debug)] +pub enum Definition { + Path(PathBuf), + Environment(String), +} + impl fmt::Debug for ConfigValue { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { - CV::Integer(i, ref path) => write!(f, "{} (from {})", i, - path.display()), - CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, - path.display()), - CV::String(ref s, ref path) => write!(f, "{} (from {})", s, - path.display()), + CV::Integer(i, ref path) => write!(f, "{} (from {})", i, path.display()), + CV::Boolean(b, ref path) => write!(f, "{} (from {})", b, path.display()), + CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()), CV::List(ref list, ref path) => { - try!(write!(f, "[")); + write!(f, "[")?; for (i, &(ref s, ref path)) in list.iter().enumerate() { - if i > 0 { try!(write!(f, ", ")); } - try!(write!(f, "{} (from {})", s, path.display())); + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{} (from {})", s, path.display())?; } write!(f, "] (from {})", path.display()) } @@ -293,121 +1491,132 @@ impl fmt::Debug for ConfigValue { } } -impl Encodable for ConfigValue { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - match *self { - CV::String(ref string, _) => string.encode(s), - CV::List(ref list, _) => { - let list: Vec<&String> = list.iter().map(|s| &s.0).collect(); - list.encode(s) - } - CV::Table(ref table, _) => table.encode(s), - CV::Boolean(b, _) => b.encode(s), - CV::Integer(i, _) => i.encode(s), - } - } -} - impl ConfigValue { fn from_toml(path: &Path, toml: toml::Value) -> CargoResult { match toml { toml::Value::String(val) => Ok(CV::String(val, path.to_path_buf())), toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())), toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())), - toml::Value::Array(val) => { - Ok(CV::List(try!(val.into_iter().map(|toml| { - match toml { + toml::Value::Array(val) => Ok(CV::List( + val.into_iter() + .map(|toml| match toml { toml::Value::String(val) => Ok((val, path.to_path_buf())), - v => Err(human(format!("expected string but found {} \ - in list", v.type_str()))), - } - }).collect::>()), path.to_path_buf())) + v => failure::bail!("expected string but found {} in list", v.type_str()), + }) + .collect::>()?, + path.to_path_buf(), + )), + toml::Value::Table(val) => Ok(CV::Table( + val.into_iter() + .map(|(key, value)| { + let value = CV::from_toml(path, value) + .chain_err(|| format!("failed to parse key `{}`", key))?; + Ok((key, value)) + }) + .collect::>()?, + path.to_path_buf(), + )), + v => failure::bail!( + "found TOML configuration value of unknown type `{}`", + v.type_str() + ), + } + } + + fn into_toml(self) -> toml::Value { + match self { + CV::Boolean(s, _) => toml::Value::Boolean(s), + CV::String(s, _) => toml::Value::String(s), + CV::Integer(i, _) => toml::Value::Integer(i), + CV::List(l, _) => { + toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect()) } - toml::Value::Table(val) => { - Ok(CV::Table(try!(val.into_iter().map(|(key, value)| { - 
let value = try!(CV::from_toml(path, value).chain_error(|| { - human(format!("failed to parse key `{}`", key)) - })); - Ok((key, value)) - }).collect::>()), path.to_path_buf())) + CV::Table(l, _) => { + toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect()) } - v => return Err(human(format!("found TOML configuration value of \ - unknown type `{}`", v.type_str()))) } } fn merge(&mut self, from: ConfigValue) -> CargoResult<()> { match (self, from) { - (&mut CV::String(..), CV::String(..)) | - (&mut CV::Integer(..), CV::Integer(..)) | - (&mut CV::Boolean(..), CV::Boolean(..)) => {} (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => { let new = mem::replace(new, Vec::new()); old.extend(new.into_iter()); } (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => { let new = mem::replace(new, HashMap::new()); - for (key, value) in new.into_iter() { + for (key, value) in new { match old.entry(key.clone()) { Occupied(mut entry) => { let path = value.definition_path().to_path_buf(); let entry = entry.get_mut(); - try!(entry.merge(value).chain_error(|| { - human(format!("failed to merge key `{}` between \ - files:\n \ - file 1: {}\n \ - file 2: {}", - key, - entry.definition_path().display(), - path.display())) - - })); + entry.merge(value).chain_err(|| { + format!( + "failed to merge key `{}` between \ + files:\n \ + file 1: {}\n \ + file 2: {}", + key, + entry.definition_path().display(), + path.display() + ) + })?; + } + Vacant(entry) => { + entry.insert(value); } - Vacant(entry) => { entry.insert(value); } }; } } - (expected, found) => { - return Err(internal(format!("expected {}, but found {}", - expected.desc(), found.desc()))) + // Allow switching types except for tables or arrays. + (expected @ &mut CV::List(_, _), found) + | (expected @ &mut CV::Table(_, _), found) + | (expected, found @ CV::List(_, _)) + | (expected, found @ CV::Table(_, _)) => { + return Err(internal(format!( + "expected {}, but found {}", + expected.desc(), + found.desc() + ))); } + _ => {} } Ok(()) } - pub fn i64(&self) -> CargoResult<(i64, &Path)> { + pub fn i64(&self, key: &str) -> CargoResult<(i64, &Path)> { match *self { CV::Integer(i, ref p) => Ok((i, p)), - _ => self.expected("integer"), + _ => self.expected("integer", key), } } - pub fn string(&self) -> CargoResult<(&str, &Path)> { + pub fn string(&self, key: &str) -> CargoResult<(&str, &Path)> { match *self { CV::String(ref s, ref p) => Ok((s, p)), - _ => self.expected("string"), + _ => self.expected("string", key), } } - pub fn table(&self) -> CargoResult<(&HashMap, &Path)> { + pub fn table(&self, key: &str) -> CargoResult<(&HashMap, &Path)> { match *self { CV::Table(ref table, ref p) => Ok((table, p)), - _ => self.expected("table"), + _ => self.expected("table", key), } } - pub fn list(&self) -> CargoResult<&[(String, PathBuf)]> { + pub fn list(&self, key: &str) -> CargoResult<&[(String, PathBuf)]> { match *self { CV::List(ref list, _) => Ok(list), - _ => self.expected("list"), + _ => self.expected("list", key), } } - pub fn boolean(&self) -> CargoResult<(bool, &Path)> { + pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Path)> { match *self { CV::Boolean(b, ref p) => Ok((b, p)), - _ => self.expected("bool"), + _ => self.expected("bool", key), } } @@ -422,98 +1631,163 @@ impl ConfigValue { } pub fn definition_path(&self) -> &Path { - match *self { - CV::Boolean(_, ref p) | - CV::Integer(_, ref p) | - CV::String(_, ref p) | - CV::List(_, ref p) | - CV::Table(_, ref p) => p + match *self { + CV::Boolean(_, ref p) + | 
CV::Integer(_, ref p) + | CV::String(_, ref p) + | CV::List(_, ref p) + | CV::Table(_, ref p) => p, } } - fn expected(&self, wanted: &str) -> CargoResult { - Err(internal(format!("expected a {}, but found a {} in {}", - wanted, self.desc(), - self.definition_path().display()))) + fn expected(&self, wanted: &str, key: &str) -> CargoResult { + failure::bail!( + "expected a {}, but found a {} for `{}` in {}", + wanted, + self.desc(), + key, + self.definition_path().display() + ) } +} - fn into_toml(self) -> toml::Value { - match self { - CV::Boolean(s, _) => toml::Value::Boolean(s), - CV::String(s, _) => toml::Value::String(s), - CV::Integer(i, _) => toml::Value::Integer(i), - CV::List(l, _) => toml::Value::Array(l - .into_iter() - .map(|(s, _)| toml::Value::String(s)) - .collect()), - CV::Table(l, _) => toml::Value::Table(l.into_iter() - .map(|(k, v)| (k, v.into_toml())) - .collect()), +impl Definition { + pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path { + match *self { + Definition::Path(ref p) => p.parent().unwrap().parent().unwrap(), + Definition::Environment(_) => config.cwd(), } } } -fn homedir(cwd: &Path) -> Option { - let cargo_home = env::var_os("CARGO_HOME").map(|home| { - cwd.join(home) - }); - let user_home = env::home_dir().map(|p| p.join(".cargo")); - return cargo_home.or(user_home); +impl fmt::Display for Definition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Definition::Path(ref p) => p.display().fmt(f), + Definition::Environment(ref key) => write!(f, "environment variable `{}`", key), + } + } } -fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> - where F: FnMut(File, &Path) -> CargoResult<()> +pub fn homedir(cwd: &Path) -> Option { + ::home::cargo_home_with_cwd(cwd).ok() +} + +fn walk_tree(pwd: &Path, home: &Path, mut walk: F) -> CargoResult<()> +where + F: FnMut(&Path) -> CargoResult<()>, { - let mut current = pwd; + let mut stash: HashSet = HashSet::new(); - loop { + for current in paths::ancestors(pwd) { let possible = current.join(".cargo").join("config"); if fs::metadata(&possible).is_ok() { - let file = try!(File::open(&possible)); - - try!(walk(file, &possible)); - } - match current.parent() { - Some(p) => current = p, - None => break, + walk(&possible)?; + stash.insert(possible); } } // Once we're done, also be sure to walk the home directory even if it's not // in our history to be sure we pick up that standard location for // information. - let home = try!(homedir(pwd).chain_error(|| { - human("Cargo couldn't find your home directory. \ - This probably means that $HOME was not set.") - })); - if !pwd.starts_with(&home) { - let config = home.join("config"); - if fs::metadata(&config).is_ok() { - let file = try!(File::open(&config)); - try!(walk(file, &config)); - } + let config = home.join("config"); + if !stash.contains(&config) && fs::metadata(&config).is_ok() { + walk(&config)?; } Ok(()) } -pub fn set_config(cfg: &Config, loc: Location, key: &str, - value: ConfigValue) -> CargoResult<()> { - // TODO: There are a number of drawbacks here - // - // 1. Project is unimplemented - // 2. This blows away all comments in a file - // 3. This blows away the previous ordering of a file. 
- let file = match loc { - Location::Global => cfg.home_path.join("config"), - Location::Project => unimplemented!(), +pub fn save_credentials(cfg: &Config, token: String, registry: Option) -> CargoResult<()> { + let mut file = { + cfg.home_path.create_dir()?; + cfg.home_path + .open_rw(Path::new("credentials"), cfg, "credentials' config file")? + }; + + let (key, value) = { + let key = "token".to_string(); + let value = ConfigValue::String(token, file.path().to_path_buf()); + let mut map = HashMap::new(); + map.insert(key, value); + let table = CV::Table(map, file.path().to_path_buf()); + + if let Some(registry) = registry { + let mut map = HashMap::new(); + map.insert(registry, table); + ( + "registries".into(), + CV::Table(map, file.path().to_path_buf()), + ) + } else { + ("registry".into(), table) + } }; - try!(fs::create_dir_all(file.parent().unwrap())); + let mut contents = String::new(); - let _ = File::open(&file).and_then(|mut f| f.read_to_string(&mut contents)); - let mut toml = try!(cargo_toml::parse(&contents, &file)); - toml.insert(key.to_string(), value.into_toml()); - let mut out = try!(File::create(&file)); - try!(out.write_all(toml::Value::Table(toml).to_string().as_bytes())); - Ok(()) + file.read_to_string(&mut contents).chain_err(|| { + format!( + "failed to read configuration file `{}`", + file.path().display() + ) + })?; + + let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?; + + // Move the old token location to the new one. + if let Some(token) = toml.as_table_mut().unwrap().remove("token") { + let mut map = HashMap::new(); + map.insert("token".to_string(), token); + toml.as_table_mut() + .unwrap() + .insert("registry".into(), map.into()); + } + + toml.as_table_mut().unwrap().insert(key, value.into_toml()); + + let contents = toml.to_string(); + file.seek(SeekFrom::Start(0))?; + file.write_all(contents.as_bytes())?; + file.file().set_len(contents.len() as u64)?; + set_permissions(file.file(), 0o600)?; + + return Ok(()); + + #[cfg(unix)] + fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { + use std::os::unix::fs::PermissionsExt; + + let mut perms = file.metadata()?.permissions(); + perms.set_mode(mode); + file.set_permissions(perms)?; + Ok(()) + } + + #[cfg(not(unix))] + #[allow(unused)] + fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { + Ok(()) + } +} + +pub struct PackageCacheLock<'a>(&'a Config); + +impl Drop for PackageCacheLock<'_> { + fn drop(&mut self) { + let mut slot = self.0.package_cache_lock.borrow_mut(); + let (_, cnt) = slot.as_mut().unwrap(); + *cnt -= 1; + if *cnt == 0 { + *slot = None; + } + } +} + +/// returns path to clippy-driver binary +/// +/// Allows override of the path via `CARGO_CLIPPY_DRIVER` env variable +pub fn clippy_driver() -> PathBuf { + env::var("CARGO_CLIPPY_DRIVER") + .unwrap_or_else(|_| "clippy-driver".into()) + .into() } diff --git a/src/cargo/util/dependency_queue.rs b/src/cargo/util/dependency_queue.rs index f66e503d37d..9a218408069 100644 --- a/src/cargo/util/dependency_queue.rs +++ b/src/cargo/util/dependency_queue.rs @@ -3,132 +3,202 @@ //! //! This structure is used to store the dependency graph and dynamically update //! it to figure out when a dependency should be built. +//! +//! Dependencies in this queue are represented as a (node, edge) pair. This is +//! used to model nodes which produce multiple outputs at different times but +//! some nodes may only require one of the outputs and can start before the +//! whole node is finished. 
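// An illustrative sketch of the (node, edge) model described above, with
// hypothetical node names, a hypothetical "metadata" edge, and `()` values;
// compare the `deep_first` test at the bottom of this file for the real API
// in action.
//
//     let mut q = DependencyQueue::new();
//     q.queue("dep", (), vec![]);
//     // `consumer` only needs the "metadata" output of `dep`, so it can
//     // start once that edge finishes, before `dep` as a whole is done.
//     q.queue("consumer", (), vec![("dep", "metadata")]);
//     q.queue_finished();
//     assert_eq!(q.dequeue(), Some(("dep", ())));
//     q.finish(&"dep", &"metadata");
//     assert_eq!(q.dequeue(), Some(("consumer", ())));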
-use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::collections::{HashMap, HashSet};
 use std::hash::Hash;
 
-pub use self::Freshness::{Fresh, Dirty};
-
-pub struct DependencyQueue<K: Dependency, V> {
+#[derive(Debug)]
+pub struct DependencyQueue<N: Hash + Eq, E: Hash + Eq, V> {
     /// A list of all known keys to build.
     ///
     /// The value of the hash map is the list of dependencies which still need
     /// to be built before the package can be built. Note that the set is
     /// dynamically updated as more dependencies are built.
-    dep_map: HashMap<K, (HashSet<K>, V)>,
+    dep_map: HashMap<N, (HashSet<(N, E)>, V)>,
 
     /// A reverse mapping of a package to all packages that depend on that
     /// package.
     ///
     /// This map is statically known and does not get updated throughout the
     /// lifecycle of the DependencyQueue.
-    reverse_dep_map: HashMap<K, HashSet<K>>,
-
-    /// A set of dirty packages.
     ///
-    /// Packages may become dirty over time if their dependencies are rebuilt.
-    dirty: HashSet<K>,
-
-    /// The packages which are currently being built, waiting for a call to
-    /// `finish`.
-    pending: HashSet<K>,
-}
-
-/// Indication of the freshness of a package.
-///
-/// A fresh package does not necessarily need to be rebuilt (unless a dependency
-/// was also rebuilt), and a dirty package must always be rebuilt.
-#[derive(PartialEq, Eq, Debug, Clone, Copy)]
-pub enum Freshness {
-    Fresh,
-    Dirty,
-}
+    /// This is sort of like a `HashMap<(N, E), HashSet<N>>` map, but more
+    /// easily indexable with just an `N`.
+    reverse_dep_map: HashMap<N, HashMap<E, HashSet<N>>>,
 
-/// A trait for discovering the dependencies of a piece of data.
-pub trait Dependency: Hash + Eq + Clone {
-    type Context;
-    fn dependencies(&self, cx: &Self::Context) -> Vec<Self>;
+    /// Topological depth of each key.
+    depth: HashMap<N, usize>,
 }
 
-impl Freshness {
-    pub fn combine(&self, other: Freshness) -> Freshness {
-        match *self { Fresh => other, Dirty => Dirty }
+impl<N: Hash + Eq, E: Hash + Eq, V> Default for DependencyQueue<N, E, V> {
+    fn default() -> DependencyQueue<N, E, V> {
+        DependencyQueue::new()
     }
 }
 
-impl<K: Dependency, V> DependencyQueue<K, V> {
+impl<N: Hash + Eq, E: Hash + Eq, V> DependencyQueue<N, E, V> {
     /// Creates a new dependency queue with 0 packages.
-    pub fn new() -> DependencyQueue<K, V> {
+    pub fn new() -> DependencyQueue<N, E, V> {
         DependencyQueue {
             dep_map: HashMap::new(),
             reverse_dep_map: HashMap::new(),
-            dirty: HashSet::new(),
-            pending: HashSet::new(),
+            depth: HashMap::new(),
         }
     }
+}
 
-    /// Adds a new package to this dependency queue.
+impl<N: Hash + Eq + Clone, E: Eq + Hash + Clone, V> DependencyQueue<N, E, V> {
+    /// Adds a new node and its dependencies to this queue.
     ///
-    /// It is assumed that any dependencies of this package will eventually also
-    /// be added to the dependency queue.
-    pub fn queue(&mut self, cx: &K::Context, fresh: Freshness,
-                 key: K, value: V) -> &mut V {
-        let slot = match self.dep_map.entry(key.clone()) {
-            Occupied(v) => return &mut v.into_mut().1,
-            Vacant(v) => v,
-        };
+    /// The `key` specified is a new node in the dependency graph, and the node
+    /// depends on all the dependencies iterated by `dependencies`. Each
+    /// dependency is a node/edge pair, where edges can be thought of as
+    /// productions from nodes (aka if it's just `()` it's just waiting for the
+    /// node to finish).
+    ///
+    /// An optional `value` can also be associated with `key` which is reclaimed
+    /// when the node is ready to go.
+    pub fn queue(&mut self, key: N, value: V, dependencies: impl IntoIterator<Item = (N, E)>) {
+        assert!(!self.dep_map.contains_key(&key));
 
-        if fresh == Dirty {
-            self.dirty.insert(key.clone());
+        let mut my_dependencies = HashSet::new();
+        for (dep, edge) in dependencies {
+            my_dependencies.insert((dep.clone(), edge.clone()));
+            self.reverse_dep_map
+                .entry(dep)
+                .or_insert_with(HashMap::new)
+                .entry(edge)
+                .or_insert_with(HashSet::new)
+                .insert(key.clone());
         }
+        self.dep_map.insert(key, (my_dependencies, value));
+    }
 
-        let mut my_dependencies = HashSet::new();
-        for dep in key.dependencies(cx).into_iter() {
-            assert!(my_dependencies.insert(dep.clone()));
-            let rev = self.reverse_dep_map.entry(dep).or_insert(HashSet::new());
-            assert!(rev.insert(key.clone()));
+    /// All nodes have been added, calculate some internal metadata and prepare
+    /// for `dequeue`.
+    pub fn queue_finished(&mut self) {
+        for key in self.dep_map.keys() {
+            depth(key, &self.reverse_dep_map, &mut self.depth);
+        }
+
+        fn depth<N: Hash + Eq + Clone, E: Hash + Eq + Clone>(
+            key: &N,
+            map: &HashMap<N, HashMap<E, HashSet<N>>>,
+            results: &mut HashMap<N, usize>,
+        ) -> usize {
+            const IN_PROGRESS: usize = !0;
+
+            if let Some(&depth) = results.get(key) {
+                assert_ne!(depth, IN_PROGRESS, "cycle in DependencyQueue");
+                return depth;
+            }
+
+            results.insert(key.clone(), IN_PROGRESS);
+
+            let depth = 1 + map
+                .get(key)
+                .into_iter()
+                .flat_map(|it| it.values())
+                .flat_map(|set| set)
+                .map(|dep| depth(dep, map, results))
+                .max()
+                .unwrap_or(0);
+
+            *results.get_mut(key).unwrap() = depth;
+
+            depth
         }
-        &mut slot.insert((my_dependencies, value)).1
     }
 
     /// Dequeues a package that is ready to be built.
     ///
     /// A package is ready to be built when it has 0 un-built dependencies. If
     /// `None` is returned then no packages are ready to be built.
-    pub fn dequeue(&mut self) -> Option<(Freshness, K, V)> {
-        let key = match self.dep_map.iter()
-            .find(|&(_, &(ref deps, _))| deps.len() == 0)
-            .map(|(key, _)| key.clone()) {
+    pub fn dequeue(&mut self) -> Option<(N, V)> {
+        // Look at all our crates and find everything that's ready to build (no
+        // deps). After we've got that candidate set select the one which has
+        // the maximum depth in the dependency graph. This way we should
+        // hopefully keep CPUs hottest the longest by ensuring that long
+        // dependency chains are scheduled early on in the build process and the
+        // leaves higher in the tree can fill in the cracks later.
+        //
+        // TODO: it'd be best here to throw in a heuristic of crate size as
+        // well. For example how long did this crate historically take to
+        // compile? How large is its source code? etc.
+        let next = self
+            .dep_map
+            .iter()
+            .filter(|(_, (deps, _))| deps.is_empty())
+            .map(|(key, _)| key.clone())
+            .max_by_key(|k| self.depth[k]);
+        let key = match next {
             Some(key) => key,
-            None => return None
+            None => return None,
         };
         let (_, data) = self.dep_map.remove(&key).unwrap();
-        let fresh = if self.dirty.contains(&key) {Dirty} else {Fresh};
-        self.pending.insert(key.clone());
-        Some((fresh, key, data))
+        Some((key, data))
+    }
+
+    /// Returns `true` if there are remaining packages to be built.
+    pub fn is_empty(&self) -> bool {
+        self.dep_map.is_empty()
     }
 
     /// Returns the number of remaining packages to be built.
     pub fn len(&self) -> usize {
-        self.dep_map.len() + self.pending.len()
+        self.dep_map.len()
     }
 
-    /// Indicate that a package has been built.
+    /// Indicate that something has finished.
     ///
-    /// This function will update the dependency queue with this information,
-    /// possibly allowing the next invocation of `dequeue` to return a package.
- pub fn finish(&mut self, key: &K, fresh: Freshness) { - assert!(self.pending.remove(key)); - let reverse_deps = match self.reverse_dep_map.get(key) { + /// Calling this function indicates that the `node` has produced `edge`. All + /// remaining work items which only depend on this node/edge pair are now + /// candidates to start their job. + pub fn finish(&mut self, node: &N, edge: &E) { + let reverse_deps = self.reverse_dep_map.get(node).and_then(|map| map.get(edge)); + let reverse_deps = match reverse_deps { Some(deps) => deps, None => return, }; + let key = (node.clone(), edge.clone()); for dep in reverse_deps.iter() { - if fresh == Dirty { - self.dirty.insert(dep.clone()); - } - assert!(self.dep_map.get_mut(dep).unwrap().0.remove(key)); + assert!(self.dep_map.get_mut(dep).unwrap().0.remove(&key)); } } } + +#[cfg(test)] +mod test { + use super::DependencyQueue; + + #[test] + fn deep_first() { + let mut q = DependencyQueue::new(); + + q.queue(1, (), vec![]); + q.queue(2, (), vec![(1, ())]); + q.queue(3, (), vec![]); + q.queue(4, (), vec![(2, ()), (3, ())]); + q.queue(5, (), vec![(4, ()), (3, ())]); + q.queue_finished(); + + assert_eq!(q.dequeue(), Some((1, ()))); + assert_eq!(q.dequeue(), Some((3, ()))); + assert_eq!(q.dequeue(), None); + q.finish(&3, &()); + assert_eq!(q.dequeue(), None); + q.finish(&1, &()); + assert_eq!(q.dequeue(), Some((2, ()))); + assert_eq!(q.dequeue(), None); + q.finish(&2, &()); + assert_eq!(q.dequeue(), Some((4, ()))); + assert_eq!(q.dequeue(), None); + q.finish(&4, &()); + assert_eq!(q.dequeue(), Some((5, ()))); + } +} diff --git a/src/cargo/util/diagnostic_server.rs b/src/cargo/util/diagnostic_server.rs new file mode 100644 index 00000000000..237536dd0b7 --- /dev/null +++ b/src/cargo/util/diagnostic_server.rs @@ -0,0 +1,292 @@ +//! A small TCP server to handle collection of diagnostics information in a +//! cross-platform way for the `cargo fix` command. + +use std::collections::HashSet; +use std::env; +use std::io::{BufReader, Read, Write}; +use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; +use std::thread::{self, JoinHandle}; + +use failure::{Error, ResultExt}; +use log::warn; +use serde::{Deserialize, Serialize}; + +use crate::util::errors::CargoResult; +use crate::util::{Config, ProcessBuilder}; + +const DIAGNOSICS_SERVER_VAR: &str = "__CARGO_FIX_DIAGNOSTICS_SERVER"; +const PLEASE_REPORT_THIS_BUG: &str = + "This likely indicates a bug in either rustc or cargo itself,\n\ + and we would appreciate a bug report! You're likely to see \n\ + a number of compiler warnings after this message which cargo\n\ + attempted to fix but failed. 
If you could open an issue at\n\ + https://github.com/rust-lang/rust/issues\n\ + quoting the full output of this command we'd be very appreciative!\n\ + Note that you may be able to make some more progress in the near-term\n\ + fixing code with the `--broken-code` flag\n\n\ + "; + +#[derive(Deserialize, Serialize)] +pub enum Message { + Fixing { + file: String, + fixes: u32, + }, + FixFailed { + files: Vec, + krate: Option, + errors: Vec, + }, + ReplaceFailed { + file: String, + message: String, + }, + EditionAlreadyEnabled { + file: String, + edition: String, + }, + IdiomEditionMismatch { + file: String, + idioms: String, + edition: Option, + }, +} + +impl Message { + pub fn post(&self) -> Result<(), Error> { + let addr = + env::var(DIAGNOSICS_SERVER_VAR).context("diagnostics collector misconfigured")?; + let mut client = + TcpStream::connect(&addr).context("failed to connect to parent diagnostics target")?; + + let s = serde_json::to_string(self).context("failed to serialize message")?; + client + .write_all(s.as_bytes()) + .context("failed to write message to diagnostics target")?; + client + .shutdown(Shutdown::Write) + .context("failed to shutdown")?; + + let mut tmp = Vec::new(); + client + .read_to_end(&mut tmp) + .context("failed to receive a disconnect")?; + + Ok(()) + } +} + +pub struct DiagnosticPrinter<'a> { + config: &'a Config, + edition_already_enabled: HashSet, + idiom_mismatch: HashSet, +} + +impl<'a> DiagnosticPrinter<'a> { + pub fn new(config: &'a Config) -> DiagnosticPrinter<'a> { + DiagnosticPrinter { + config, + edition_already_enabled: HashSet::new(), + idiom_mismatch: HashSet::new(), + } + } + + pub fn print(&mut self, msg: &Message) -> CargoResult<()> { + match msg { + Message::Fixing { file, fixes } => { + let msg = if *fixes == 1 { "fix" } else { "fixes" }; + let msg = format!("{} ({} {})", file, fixes, msg); + self.config.shell().status("Fixing", msg) + } + Message::ReplaceFailed { file, message } => { + let msg = format!("error applying suggestions to `{}`\n", file); + self.config.shell().warn(&msg)?; + write!( + self.config.shell().err(), + "The full error message was:\n\n> {}\n\n", + message, + )?; + write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?; + Ok(()) + } + Message::FixFailed { + files, + krate, + errors, + } => { + if let Some(ref krate) = *krate { + self.config.shell().warn(&format!( + "failed to automatically apply fixes suggested by rustc \ + to crate `{}`", + krate, + ))?; + } else { + self.config + .shell() + .warn("failed to automatically apply fixes suggested by rustc")?; + } + if !files.is_empty() { + writeln!( + self.config.shell().err(), + "\nafter fixes were automatically applied the compiler \ + reported errors within these files:\n" + )?; + for file in files { + writeln!(self.config.shell().err(), " * {}", file)?; + } + writeln!(self.config.shell().err())?; + } + write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?; + if !errors.is_empty() { + writeln!( + self.config.shell().err(), + "The following errors were reported:" + )?; + for error in errors { + write!(self.config.shell().err(), "{}", error)?; + if !error.ends_with('\n') { + writeln!(self.config.shell().err())?; + } + } + } + writeln!( + self.config.shell().err(), + "Original diagnostics will follow.\n" + )?; + Ok(()) + } + Message::EditionAlreadyEnabled { file, edition } => { + // Like above, only warn once per file + if !self.edition_already_enabled.insert(file.clone()) { + return Ok(()); + } + + let msg = format!( + "\ +cannot prepare for the {} 
edition when it is enabled, so cargo cannot +automatically fix errors in `{}` + +To prepare for the {0} edition you should first remove `edition = '{0}'` from +your `Cargo.toml` and then rerun this command. Once all warnings have been fixed +then you can re-enable the `edition` key in `Cargo.toml`. For some more +information about transitioning to the {0} edition see: + + https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html +", + edition, + file, + ); + self.config.shell().error(&msg)?; + Ok(()) + } + Message::IdiomEditionMismatch { + file, + idioms, + edition, + } => { + // Same as above + if !self.idiom_mismatch.insert(file.clone()) { + return Ok(()); + } + self.config.shell().error(&format!( + "\ +cannot migrate to the idioms of the {} edition for `{}` +because it is compiled {}, which doesn't match {0} + +consider migrating to the {0} edition by adding `edition = '{0}'` to +`Cargo.toml` and then rerunning this command; a more detailed transition +guide can be found at + + https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html +", + idioms, + file, + match edition { + Some(s) => format!("with the {} edition", s), + None => "without an edition".to_string(), + }, + ))?; + Ok(()) + } + } + } +} + +#[derive(Debug)] +pub struct RustfixDiagnosticServer { + listener: TcpListener, + addr: SocketAddr, +} + +pub struct StartedServer { + addr: SocketAddr, + done: Arc, + thread: Option>, +} + +impl RustfixDiagnosticServer { + pub fn new() -> Result { + let listener = TcpListener::bind("127.0.0.1:0") + .with_context(|_| "failed to bind TCP listener to manage locking")?; + let addr = listener.local_addr()?; + + Ok(RustfixDiagnosticServer { listener, addr }) + } + + pub fn configure(&self, process: &mut ProcessBuilder) { + process.env(DIAGNOSICS_SERVER_VAR, self.addr.to_string()); + } + + pub fn start(self, on_message: F) -> Result + where + F: Fn(Message) + Send + 'static, + { + let addr = self.addr; + let done = Arc::new(AtomicBool::new(false)); + let done2 = done.clone(); + let thread = thread::spawn(move || { + self.run(&on_message, &done2); + }); + + Ok(StartedServer { + addr, + thread: Some(thread), + done, + }) + } + + fn run(self, on_message: &dyn Fn(Message), done: &AtomicBool) { + while let Ok((client, _)) = self.listener.accept() { + if done.load(Ordering::SeqCst) { + break; + } + let mut client = BufReader::new(client); + let mut s = String::new(); + if let Err(e) = client.read_to_string(&mut s) { + warn!("diagnostic server failed to read: {}", e); + } else { + match serde_json::from_str(&s) { + Ok(message) => on_message(message), + Err(e) => warn!("invalid diagnostics message: {}", e), + } + } + // The client should be kept alive until after `on_message` is + // called to ensure that the client doesn't exit too soon (and + // Message::Finish getting posted before Message::FixDiagnostic). 
+ drop(client); + } + } +} + +impl Drop for StartedServer { + fn drop(&mut self) { + self.done.store(true, Ordering::SeqCst); + // Ignore errors here as this is largely best-effort + if TcpStream::connect(&self.addr).is_err() { + return; + } + drop(self.thread.take().unwrap().join()); + } +} diff --git a/src/cargo/util/errors.rs b/src/cargo/util/errors.rs index 4c24b34c59b..1abba82b3ff 100644 --- a/src/cargo/util/errors.rs +++ b/src/cargo/util/errors.rs @@ -1,308 +1,298 @@ -use std::error::Error; -use std::ffi; +#![allow(unknown_lints)] + use std::fmt; -use std::io; -use std::process::{Output, ExitStatus}; +use std::path::PathBuf; +use std::process::{ExitStatus, Output}; use std::str; -use semver; -use rustc_serialize::json; - -use curl; -use git2; -use toml; -use url; +use clap; +use failure::{Context, Error, Fail}; +use log::trace; -pub type CargoResult = Result>; +use crate::core::{TargetKind, Workspace}; +use crate::ops::CompileOptions; -// ============================================================================= -// CargoError trait +pub type CargoResult = failure::Fallible; // Alex's body isn't quite ready to give up "Result" -pub trait CargoError: Error + Send + 'static { - fn is_human(&self) -> bool { false } - fn cargo_cause(&self) -> Option<&CargoError>{ None } +pub trait CargoResultExt { + fn chain_err(self, f: F) -> Result> + where + F: FnOnce() -> D, + D: fmt::Display + Send + Sync + 'static; } -impl Error for Box { - fn description(&self) -> &str { (**self).description() } - fn cause(&self) -> Option<&Error> { (**self).cause() } +impl CargoResultExt for Result +where + E: Into, +{ + fn chain_err(self, f: F) -> Result> + where + F: FnOnce() -> D, + D: fmt::Display + Send + Sync + 'static, + { + self.map_err(|failure| { + let err = failure.into(); + let context = f(); + trace!("error: {}", err); + trace!("\tcontext: {}", context); + err.context(context) + }) + } } -impl CargoError for Box { - fn is_human(&self) -> bool { (**self).is_human() } - fn cargo_cause(&self) -> Option<&CargoError> { (**self).cargo_cause() } +#[derive(Debug, Fail)] +#[fail(display = "failed to get 200 response from `{}`, got {}", url, code)] +pub struct HttpNot200 { + pub code: u32, + pub url: String, } -// ============================================================================= -// Chaining errors - -pub trait ChainError { - fn chain_error(self, callback: F) -> CargoResult - where E: CargoError, F: FnOnce() -> E; +pub struct Internal { + inner: Error, } -#[derive(Debug)] -struct ChainedError { - error: E, - cause: Box, -} - -impl<'a, T, F> ChainError for F where F: FnOnce() -> CargoResult { - fn chain_error(self, callback: C) -> CargoResult - where E: CargoError, C: FnOnce() -> E { - self().chain_error(callback) +impl Internal { + pub fn new(inner: Error) -> Internal { + Internal { inner } } } -impl ChainError for Result { - #[allow(trivial_casts)] - fn chain_error(self, callback: C) -> CargoResult - where E2: CargoError, C: FnOnce() -> E2 { - self.map_err(move |err| { - Box::new(ChainedError { - error: callback(), - cause: Box::new(err), - }) as Box - }) +impl Fail for Internal { + fn cause(&self) -> Option<&dyn Fail> { + self.inner.as_fail().cause() } } -impl ChainError for Box { - fn chain_error(self, callback: C) -> CargoResult - where E2: CargoError, C: FnOnce() -> E2 { - Err(Box::new(ChainedError { - error: callback(), - cause: self, - })) +impl fmt::Debug for Internal { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.inner.fmt(f) } } -impl ChainError for Option { - fn 
chain_error<E, C>(self, callback: C) -> CargoResult<T>
-        where E: CargoError, C: FnOnce() -> E {
-        match self {
-            Some(t) => Ok(t),
-            None => Err(Box::new(callback())),
-        }
+impl fmt::Display for Internal {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
-impl<E: CargoError> Error for ChainedError<E> {
-    fn description(&self) -> &str { self.error.description() }
+/// Error wrapper related to a particular manifest and providing its path.
+///
+/// This error adds no displayable info of its own.
+pub struct ManifestError {
+    cause: Error,
+    manifest: PathBuf,
+}
 
-impl<E: CargoError> fmt::Display for ChainedError<E> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.error, f)
+impl ManifestError {
+    pub fn new<E: Into<Error>>(cause: E, manifest: PathBuf) -> Self {
+        Self {
+            cause: cause.into(),
+            manifest,
+        }
     }
-}
 
-impl<E: CargoError> CargoError for ChainedError<E> {
-    fn is_human(&self) -> bool { self.error.is_human() }
-    fn cargo_cause(&self) -> Option<&CargoError> { Some(&*self.cause) }
-}
-
-// =============================================================================
-// Process errors
+    pub fn manifest_path(&self) -> &PathBuf {
+        &self.manifest
+    }
 
-pub struct ProcessError {
-    pub desc: String,
-    pub exit: Option<ExitStatus>,
-    pub output: Option<Output>,
-    cause: Option<io::Error>,
+    /// Returns an iterator over the `ManifestError` chain of causes.
+    ///
+    /// So if this error was not caused by another `ManifestError` this will be empty.
+    pub fn manifest_causes(&self) -> ManifestCauses<'_> {
+        ManifestCauses { current: self }
+    }
 }
 
-impl Error for ProcessError {
-    fn description(&self) -> &str { &self.desc }
-    #[allow(trivial_casts)]
-    fn cause(&self) -> Option<&Error> {
-        self.cause.as_ref().map(|s| s as &Error)
+impl Fail for ManifestError {
+    fn cause(&self) -> Option<&dyn Fail> {
+        self.cause.as_fail().cause()
    }
 }
 
-impl fmt::Display for ProcessError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.desc, f)
+impl fmt::Debug for ManifestError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.cause.fmt(f)
     }
 }
 
-impl fmt::Debug for ProcessError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(self, f)
+
+impl fmt::Display for ManifestError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.cause.fmt(f)
     }
 }
 
-// =============================================================================
-// Concrete errors
-
-struct ConcreteCargoError {
-    description: String,
-    detail: Option<String>,
-    cause: Option<Box<CargoError>>,
-    is_human: bool,
+/// An iterator over the `ManifestError` chain of causes.
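// A hedged usage sketch with illustrative variable names: given a
// `failure::Error` somewhere up the stack, the chain of nested manifests can
// be reported like this.
//
//     if let Some(m) = err.downcast_ref::<ManifestError>() {
//         for cause in m.manifest_causes() {
//             eprintln!("caused by manifest `{}`", cause.manifest_path().display());
//         }
//     }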
+pub struct ManifestCauses<'a> {
+    current: &'a ManifestError,
+}
 
-impl fmt::Display for ConcreteCargoError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        try!(write!(f, "{}", self.description));
-        if let Some(ref s) = self.detail {
-            try!(write!(f, " ({})", s));
-        }
-        Ok(())
-    }
-}
-impl fmt::Debug for ConcreteCargoError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(self, f)
-    }
-}
 
+impl<'a> Iterator for ManifestCauses<'a> {
+    type Item = &'a ManifestError;
 
-impl Error for ConcreteCargoError {
-    fn description(&self) -> &str { &self.description }
-    fn cause(&self) -> Option<&Error> {
-        self.cause.as_ref().map(|c| {
-            let e: &Error = &**c; e
-        })
+    fn next(&mut self) -> Option<Self::Item> {
+        self.current = self.current.cause.downcast_ref()?;
+        Some(self.current)
     }
 }
 
-impl CargoError for ConcreteCargoError {
-    fn is_human(&self) -> bool {
-        self.is_human
-    }
-}
+impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {}
 
 // =============================================================================
-// Human errors
+// Process errors
 
+#[derive(Debug, Fail)]
+#[fail(display = "{}", desc)]
+pub struct ProcessError {
+    pub desc: String,
+    pub exit: Option<ExitStatus>,
+    pub output: Option<Output>,
+}
 
-#[derive(Debug)]
-pub struct Human<E>(pub E);
 
 // =============================================================================
+// Cargo test errors.
 
-impl<E: Error> Error for Human<E> {
-    fn description(&self) -> &str { self.0.description() }
-    fn cause(&self) -> Option<&Error> { self.0.cause() }
+/// Error when test cases fail
+#[derive(Debug, Fail)]
+#[fail(display = "{}", desc)]
+pub struct CargoTestError {
+    pub test: Test,
+    pub desc: String,
+    pub exit: Option<ExitStatus>,
+    pub causes: Vec<ProcessError>,
 }
 
-impl<E: fmt::Display> fmt::Display for Human<E> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(&self.0, f)
+#[derive(Debug)]
+pub enum Test {
+    Multiple,
+    Doc,
+    UnitTest {
+        kind: TargetKind,
+        name: String,
+        pkg_name: String,
+    },
+}
+
+impl CargoTestError {
+    pub fn new(test: Test, errors: Vec<ProcessError>) -> Self {
+        if errors.is_empty() {
+            panic!("Cannot create CargoTestError from empty Vec")
+        }
+        let desc = errors
+            .iter()
+            .map(|error| error.desc.clone())
+            .collect::<Vec<String>>()
+            .join("\n");
+        CargoTestError {
+            test,
+            desc,
+            exit: errors[0].exit,
+            causes: errors,
+        }
     }
-}
 
-impl<E: CargoError> CargoError for Human<E> {
-    fn is_human(&self) -> bool { true }
-    fn cargo_cause(&self) -> Option<&CargoError> { self.0.cargo_cause() }
+    pub fn hint(&self, ws: &Workspace<'_>, opts: &CompileOptions<'_>) -> String {
+        match self.test {
+            Test::UnitTest {
+                ref kind,
+                ref name,
+                ref pkg_name,
+            } => {
+                let pkg_info = if opts.spec.needs_spec_flag(ws) {
+                    format!("-p {} ", pkg_name)
+                } else {
+                    String::new()
+                };
+
+                match *kind {
+                    TargetKind::Bench => {
+                        format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name)
+                    }
+                    TargetKind::Bin => {
+                        format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name)
+                    }
+                    TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info),
+                    TargetKind::Test => {
+                        format!("test failed, to rerun pass '{}--test {}'", pkg_info, name)
+                    }
+                    TargetKind::ExampleBin | TargetKind::ExampleLib(_) => {
+                        format!("test failed, to rerun pass '{}--example {}'", pkg_info, name)
+                    }
+                    _ => "test failed.".into(),
+                }
+            }
+            Test::Doc => "test failed, to rerun pass '--doc'".into(),
+            _ => "test failed.".into(),
+        }
+    }
 }
 
 // =============================================================================
 // CLI errors
 
 pub type CliResult = Result<(),
CliError>; #[derive(Debug)] pub struct CliError { - pub error: Box, + pub error: Option, pub unknown: bool, - pub exit_code: i32 -} - -impl Error for CliError { - fn description(&self) -> &str { self.error.description() } - fn cause(&self) -> Option<&Error> { self.error.cause() } -} - -impl fmt::Display for CliError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.error, f) - } + pub exit_code: i32, } impl CliError { - pub fn new(error: &str, code: i32) -> CliError { - let error = human(error.to_string()); - CliError::from_boxed(error, code) - } - - pub fn from_error(error: E, code: i32) -> CliError { - let error = Box::new(error); - CliError::from_boxed(error, code) + pub fn new(error: failure::Error, code: i32) -> CliError { + let unknown = error.downcast_ref::().is_some(); + CliError { + error: Some(error), + exit_code: code, + unknown, + } } - pub fn from_boxed(error: Box, code: i32) -> CliError { - let human = error.is_human(); - CliError { error: error, exit_code: code, unknown: !human } + pub fn code(code: i32) -> CliError { + CliError { + error: None, + exit_code: code, + unknown: false, + } } } -impl From> for CliError { - fn from(err: Box) -> CliError { - CliError::from_boxed(err, 101) +impl From for CliError { + fn from(err: failure::Error) -> CliError { + CliError::new(err, 101) } } -// ============================================================================= -// various impls - -macro_rules! from_error { - ($($p:ty,)*) => ( - $(impl From<$p> for Box { - fn from(t: $p) -> Box { Box::new(t) } - })* - ) -} - -from_error! { - semver::ReqParseError, - io::Error, - ProcessError, - git2::Error, - json::DecoderError, - curl::ErrCode, - CliError, - toml::Error, - url::ParseError, - toml::DecodeError, - ffi::NulError, -} - -impl From> for Box { - fn from(t: Human) -> Box { Box::new(t) } +impl From for CliError { + fn from(err: clap::Error) -> CliError { + let code = if err.use_stderr() { 1 } else { 0 }; + CliError::new(err.into(), code) + } } -impl CargoError for semver::ReqParseError {} -impl CargoError for io::Error {} -impl CargoError for git2::Error {} -impl CargoError for json::DecoderError {} -impl CargoError for curl::ErrCode {} -impl CargoError for ProcessError {} -impl CargoError for CliError {} -impl CargoError for toml::Error {} -impl CargoError for toml::DecodeError {} -impl CargoError for url::ParseError {} -impl CargoError for ffi::NulError {} - // ============================================================================= // Construction helpers -pub fn process_error(msg: &str, - cause: Option, - status: Option<&ExitStatus>, - output: Option<&Output>) -> ProcessError { +pub fn process_error( + msg: &str, + status: Option, + output: Option<&Output>, +) -> ProcessError { let exit = match status { - Some(s) => s.to_string(), + Some(s) => status_to_string(s), None => "never executed".to_string(), }; let mut desc = format!("{} ({})", &msg, exit); if let Some(out) = output { match str::from_utf8(&out.stdout) { - Ok(s) if s.trim().len() > 0 => { + Ok(s) if !s.trim().is_empty() => { desc.push_str("\n--- stdout\n"); desc.push_str(s); } Ok(..) | Err(..) 
=> {}
         }
 
         match str::from_utf8(&out.stderr) {
-            Ok(s) if s.trim().len() > 0 => {
+            Ok(s) if !s.trim().is_empty() => {
                 desc.push_str("\n--- stderr\n");
                 desc.push_str(s);
             }
@@ -310,49 +300,99 @@ pub fn process_error(msg: &str,
         }
     }
 
-    ProcessError {
-        desc: desc,
-        exit: status.map(|a| a.clone()),
-        output: output.map(|a| a.clone()),
-        cause: cause,
+    return ProcessError {
+        desc,
+        exit: status,
+        output: output.cloned(),
+    };
+
+    #[cfg(unix)]
+    fn status_to_string(status: ExitStatus) -> String {
+        use std::os::unix::process::*;
+
+        if let Some(signal) = status.signal() {
+            let name = match signal as libc::c_int {
+                libc::SIGABRT => ", SIGABRT: process abort signal",
+                libc::SIGALRM => ", SIGALRM: alarm clock",
+                libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation",
+                libc::SIGHUP => ", SIGHUP: hangup",
+                libc::SIGILL => ", SIGILL: illegal instruction",
+                libc::SIGINT => ", SIGINT: terminal interrupt signal",
+                libc::SIGKILL => ", SIGKILL: kill",
+                libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read",
+                libc::SIGQUIT => ", SIGQUIT: terminal quit signal",
+                libc::SIGSEGV => ", SIGSEGV: invalid memory reference",
+                libc::SIGTERM => ", SIGTERM: termination signal",
+                libc::SIGBUS => ", SIGBUS: access to undefined memory",
+                #[cfg(not(target_os = "haiku"))]
+                libc::SIGSYS => ", SIGSYS: bad system call",
+                libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap",
+                _ => "",
+            };
+            format!("signal: {}{}", signal, name)
+        } else {
+            status.to_string()
+        }
    }
-}
 
-pub fn internal_error(error: &str, detail: &str) -> Box<CargoError> {
-    Box::new(ConcreteCargoError {
-        description: error.to_string(),
-        detail: Some(detail.to_string()),
-        cause: None,
-        is_human: false
-    })
+    #[cfg(windows)]
+    fn status_to_string(status: ExitStatus) -> String {
+        use winapi::shared::minwindef::DWORD;
+        use winapi::um::winnt::*;
+
+        let mut base = status.to_string();
+        let extra = match status.code().unwrap() as DWORD {
+            STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION",
+            STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR",
+            STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE",
+            STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER",
+            STATUS_NO_MEMORY => "STATUS_NO_MEMORY",
+            STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION",
+            STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION",
+            STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION",
+            STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED",
+            STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND",
+            STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO",
+            STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT",
+            STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION",
+            STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW",
+            STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK",
+            STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW",
+            STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO",
+            STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW",
+            STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION",
+            STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW",
+            STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND",
+            STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND",
+            STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND",
+            STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT",
+            STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED",
+            STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS",
+            STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS",
+            STATUS_REG_NAT_CONSUMPTION =>
"STATUS_REG_NAT_CONSUMPTION", + STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION", + STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN", + STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE", + _ => return base, + }; + base.push_str(", "); + base.push_str(extra); + base + } } -pub fn internal(error: S) -> Box { - Box::new(ConcreteCargoError { - description: error.to_string(), - detail: None, - cause: None, - is_human: false - }) +pub fn internal(error: S) -> failure::Error { + _internal(&error) } -pub fn human(error: S) -> Box { - Box::new(ConcreteCargoError { - description: error.to_string(), - detail: None, - cause: None, - is_human: true - }) +fn _internal(error: &dyn fmt::Display) -> failure::Error { + Internal::new(failure::format_err!("{}", error)).into() } -pub fn caused_human(error: S, cause: E) -> Box - where S: fmt::Display, - E: Error + Send + 'static -{ - Box::new(ConcreteCargoError { - description: error.to_string(), - detail: None, - cause: Some(Box::new(cause)), - is_human: true - }) +pub fn display_causes(error: &Error) -> String { + error + .iter_chain() + .map(|e| e.to_string()) + .collect::>() + .join("\nCaused by:\n ") } diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs new file mode 100644 index 00000000000..96458bdf356 --- /dev/null +++ b/src/cargo/util/flock.rs @@ -0,0 +1,369 @@ +use std::fs::{self, File, OpenOptions}; +use std::io; +use std::io::{Read, Seek, SeekFrom, Write}; +use std::path::{Display, Path, PathBuf}; + +use fs2::{lock_contended_error, FileExt}; +#[allow(unused_imports)] +use libc; +use termcolor::Color::Cyan; + +use crate::util::errors::{CargoResult, CargoResultExt}; +use crate::util::paths; +use crate::util::Config; + +#[derive(Debug)] +pub struct FileLock { + f: Option, + path: PathBuf, + state: State, +} + +#[derive(PartialEq, Debug)] +enum State { + Unlocked, + Shared, + Exclusive, +} + +impl FileLock { + /// Returns the underlying file handle of this lock. + pub fn file(&self) -> &File { + self.f.as_ref().unwrap() + } + + /// Returns the underlying path that this lock points to. + /// + /// Note that special care must be taken to ensure that the path is not + /// referenced outside the lifetime of this lock. + pub fn path(&self) -> &Path { + assert_ne!(self.state, State::Unlocked); + &self.path + } + + /// Returns the parent path containing this file + pub fn parent(&self) -> &Path { + assert_ne!(self.state, State::Unlocked); + self.path.parent().unwrap() + } + + /// Removes all sibling files to this locked file. + /// + /// This can be useful if a directory is locked with a sentinel file but it + /// needs to be cleared out as it may be corrupt. + pub fn remove_siblings(&self) -> CargoResult<()> { + let path = self.path(); + for entry in path.parent().unwrap().read_dir()? 
{
+            let entry = entry?;
+            if Some(&entry.file_name()[..]) == path.file_name() {
+                continue;
+            }
+            let kind = entry.file_type()?;
+            if kind.is_dir() {
+                paths::remove_dir_all(entry.path())?;
+            } else {
+                paths::remove_file(entry.path())?;
+            }
+        }
+        Ok(())
+    }
+}
+
+impl Read for FileLock {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        self.file().read(buf)
+    }
+}
+
+impl Seek for FileLock {
+    fn seek(&mut self, to: SeekFrom) -> io::Result<u64> {
+        self.file().seek(to)
+    }
+}
+
+impl Write for FileLock {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+        self.file().write(buf)
+    }
+
+    fn flush(&mut self) -> io::Result<()> {
+        self.file().flush()
+    }
+}
+
+impl Drop for FileLock {
+    fn drop(&mut self) {
+        if self.state != State::Unlocked {
+            if let Some(f) = self.f.take() {
+                let _ = f.unlock();
+            }
+        }
+    }
+}
+
+/// A "filesystem" is intended to be a globally shared, hence locked, resource
+/// in Cargo.
+///
+/// The `Path` of a filesystem cannot be learned unless it's done in a locked
+/// fashion, and otherwise functions on this structure are prepared to handle
+/// concurrent invocations across multiple instances of Cargo.
+#[derive(Clone, Debug)]
+pub struct Filesystem {
+    root: PathBuf,
+}
+
+impl Filesystem {
+    /// Creates a new filesystem to be rooted at the given path.
+    pub fn new(path: PathBuf) -> Filesystem {
+        Filesystem { root: path }
+    }
+
+    /// Like `Path::join`, creates a new filesystem rooted at this filesystem
+    /// joined with the given path.
+    pub fn join<T: AsRef<Path>>(&self, other: T) -> Filesystem {
+        Filesystem::new(self.root.join(other))
+    }
+
+    /// Like `Path::push`, pushes a new path component onto this filesystem.
+    pub fn push<T: AsRef<Path>>(&mut self, other: T) {
+        self.root.push(other);
+    }
+
+    /// Consumes this filesystem and returns the underlying `PathBuf`.
+    ///
+    /// Note that this is a relatively dangerous operation and should be used
+    /// with great caution!
+    pub fn into_path_unlocked(self) -> PathBuf {
+        self.root
+    }
+
+    /// Returns the underlying `Path`.
+    ///
+    /// Note that this is a relatively dangerous operation and should be used
+    /// with great caution!
+    pub fn as_path_unlocked(&self) -> &Path {
+        &self.root
+    }
+
+    /// Creates the directory pointed to by this filesystem.
+    ///
+    /// Handles errors where other Cargo processes are also attempting to
+    /// concurrently create this directory.
+    pub fn create_dir(&self) -> io::Result<()> {
+        fs::create_dir_all(&self.root)
+    }
+
+    /// Returns an adaptor that can be used to print the path of this
+    /// filesystem.
+    pub fn display(&self) -> Display<'_> {
+        self.root.display()
+    }
+
+    /// Opens exclusive access to a file, returning the locked version of a
+    /// file.
+    ///
+    /// This function will create a file at `path` if it doesn't already exist
+    /// (including intermediate directories), and then it will acquire an
+    /// exclusive lock on `path`. If the process must block waiting for the
+    /// lock, the `msg` is printed to `config`.
+    ///
+    /// The returned file can be accessed to look at the path and also has
+    /// read/write access to the underlying file.
+    pub fn open_rw<P>(&self, path: P, config: &Config, msg: &str) -> CargoResult<FileLock>
+    where
+        P: AsRef<Path>,
+    {
+        self.open(
+            path.as_ref(),
+            OpenOptions::new().read(true).write(true).create(true),
+            State::Exclusive,
+            config,
+            msg,
+        )
+    }
+
+    /// Opens shared access to a file, returning the locked version of a file.
+    ///
+    /// This function will fail if `path` doesn't already exist, but if it does
+    /// then it will acquire a shared lock on `path`. If the process must block
+    /// waiting for the lock, the `msg` is printed to `config`.
+    ///
+    /// The returned file can be accessed to look at the path and also has read
+    /// access to the underlying file. Any writes to the file will return an
+    /// error.
+    pub fn open_ro<P>
(&self, path: P, config: &Config, msg: &str) -> CargoResult + where + P: AsRef, + { + self.open( + path.as_ref(), + OpenOptions::new().read(true), + State::Shared, + config, + msg, + ) + } + + fn open( + &self, + path: &Path, + opts: &OpenOptions, + state: State, + config: &Config, + msg: &str, + ) -> CargoResult { + let path = self.root.join(path); + + // If we want an exclusive lock then if we fail because of NotFound it's + // likely because an intermediate directory didn't exist, so try to + // create the directory and then continue. + let f = opts + .open(&path) + .or_else(|e| { + if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive { + fs::create_dir_all(path.parent().unwrap())?; + opts.open(&path) + } else { + Err(e) + } + }) + .chain_err(|| format!("failed to open: {}", path.display()))?; + match state { + State::Exclusive => { + acquire(config, msg, &path, &|| f.try_lock_exclusive(), &|| { + f.lock_exclusive() + })?; + } + State::Shared => { + acquire(config, msg, &path, &|| f.try_lock_shared(), &|| { + f.lock_shared() + })?; + } + State::Unlocked => {} + } + Ok(FileLock { + f: Some(f), + path, + state, + }) + } +} + +impl PartialEq for Filesystem { + fn eq(&self, other: &Path) -> bool { + self.root == other + } +} + +impl PartialEq for Path { + fn eq(&self, other: &Filesystem) -> bool { + self == other.root + } +} + +/// Acquires a lock on a file in a "nice" manner. +/// +/// Almost all long-running blocking actions in Cargo have a status message +/// associated with them as we're not sure how long they'll take. Whenever a +/// conflicted file lock happens, this is the case (we're not sure when the lock +/// will be released). +/// +/// This function will acquire the lock on a `path`, printing out a nice message +/// to the console if we have to wait for it. It will first attempt to use `try` +/// to acquire a lock on the crate, and in the case of contention it will emit a +/// status message based on `msg` to `config`'s shell, and then use `block` to +/// block waiting to acquire a lock. +/// +/// Returns an error if the lock could not be acquired or if any error other +/// than a contention error happens. +fn acquire( + config: &Config, + msg: &str, + path: &Path, + r#try: &dyn Fn() -> io::Result<()>, + block: &dyn Fn() -> io::Result<()>, +) -> CargoResult<()> { + // File locking on Unix is currently implemented via `flock`, which is known + // to be broken on NFS. We could in theory just ignore errors that happen on + // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking + // forever**, even if the "non-blocking" flag is passed! + // + // As a result, we just skip all file locks entirely on NFS mounts. That + // should avoid calling any `flock` functions at all, and it wouldn't work + // there anyway. + // + // [1]: https://github.com/rust-lang/cargo/issues/2615 + if is_on_nfs_mount(path) { + return Ok(()); + } + + match r#try() { + Ok(()) => return Ok(()), + + // In addition to ignoring NFS which is commonly not working we also + // just ignore locking on filesystems that look like they don't + // implement file locking. We detect that here via the return value of + // locking (e.g., inspecting errno). 
+ #[cfg(unix)] + Err(ref e) if e.raw_os_error() == Some(libc::ENOTSUP) => return Ok(()), + + #[cfg(target_os = "linux")] + Err(ref e) if e.raw_os_error() == Some(libc::ENOSYS) => return Ok(()), + + Err(e) => { + if e.raw_os_error() != lock_contended_error().raw_os_error() { + let e = failure::Error::from(e); + let cx = format!("failed to lock file: {}", path.display()); + return Err(e.context(cx).into()); + } + } + } + let msg = format!("waiting for file lock on {}", msg); + config.shell().status_with_color("Blocking", &msg, Cyan)?; + + // We're about to block the current process and not really do anything + // productive for what could possibly be a very long time. We could be + // waiting, for example, on another Cargo to finish a download, finish an + // entire build, etc. Since we're not doing anything productive we're not + // making good use of our jobserver token, if we have one. + // + // This can typically come about if `cargo` is invoked from `make` (or some + // other jobserver-providing system). In this situation it's actually best + // if we release the token back to the original jobserver to let some other + // cpu-hungry work continue to make progress. After we're done blocking + // we'll block waiting to reacquire a token as we'll probably be doing cpu + // hungry work ourselves. + let jobserver = config.jobserver_from_env(); + if let Some(server) = jobserver { + server.release_raw()?; + } + let result = block().chain_err(|| format!("failed to lock file: {}", path.display())); + if let Some(server) = jobserver { + server.acquire_raw()?; + } + return Ok(result?); + + #[cfg(all(target_os = "linux", not(target_env = "musl")))] + fn is_on_nfs_mount(path: &Path) -> bool { + use std::ffi::CString; + use std::mem; + use std::os::unix::prelude::*; + + let path = match CString::new(path.as_os_str().as_bytes()) { + Ok(path) => path, + Err(_) => return false, + }; + + unsafe { + let mut buf: libc::statfs = mem::zeroed(); + let r = libc::statfs(path.as_ptr(), &mut buf); + + r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32 + } + } + + #[cfg(any(not(target_os = "linux"), target_env = "musl"))] + fn is_on_nfs_mount(_path: &Path) -> bool { + false + } +} diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs index 53a2b9c5ac7..00f58a292fb 100644 --- a/src/cargo/util/graph.rs +++ b/src/cargo/util/graph.rs @@ -1,97 +1,170 @@ +use std::borrow::Borrow; +use std::collections::BTreeSet; use std::fmt; -use std::hash::Hash; -use std::collections::hash_set::{HashSet, Iter}; -use std::collections::hash_map::{HashMap, Keys}; -pub struct Graph { - nodes: HashMap> -} +use im_rc; -enum Mark { - InProgress, - Done +pub struct Graph { + nodes: im_rc::OrdMap>, } -pub type Nodes<'a, N> = Keys<'a, N, HashSet>; -pub type Edges<'a, N> = Iter<'a, N>; +impl Graph { + pub fn new() -> Graph { + Graph { + nodes: im_rc::OrdMap::new(), + } + } -impl Graph { - pub fn new() -> Graph { - Graph { nodes: HashMap::new() } + pub fn add(&mut self, node: N) { + self.nodes.entry(node).or_insert_with(im_rc::OrdMap::new); } - pub fn add(&mut self, node: N, children: &[N]) { - self.nodes.insert(node, children.iter().map(|n| n.clone()).collect()); + pub fn link(&mut self, node: N, child: N) -> &mut E { + self.nodes + .entry(node) + .or_insert_with(im_rc::OrdMap::new) + .entry(child) + .or_insert_with(Default::default) } - pub fn link(&mut self, node: N, child: N) { - self.nodes.entry(node).or_insert_with(|| HashSet::new()).insert(child); + pub fn contains(&self, k: &Q) -> bool + where + N: Borrow, + Q: Ord + Eq, 
+ { + self.nodes.contains_key(k) } - pub fn get_nodes(&self) -> &HashMap> { - &self.nodes + pub fn edge(&self, from: &N, to: &N) -> Option<&E> { + self.nodes.get(from)?.get(to) } - pub fn edges(&self, node: &N) -> Option> { - self.nodes.get(node).map(|set| set.iter()) + pub fn edges(&self, from: &N) -> impl Iterator { + self.nodes.get(from).into_iter().flat_map(|x| x.iter()) } - pub fn sort(&self) -> Option> { + /// A topological sort of the `Graph` + pub fn sort(&self) -> Vec { let mut ret = Vec::new(); - let mut marks = HashMap::new(); + let mut marks = BTreeSet::new(); for node in self.nodes.keys() { - self.visit(node, &mut ret, &mut marks); + self.sort_inner_visit(node, &mut ret, &mut marks); } - Some(ret) + ret } - fn visit(&self, node: &N, dst: &mut Vec, marks: &mut HashMap) { - if marks.contains_key(node) { + fn sort_inner_visit(&self, node: &N, dst: &mut Vec, marks: &mut BTreeSet) { + if !marks.insert(node.clone()) { return; } - marks.insert(node.clone(), Mark::InProgress); - - for child in self.nodes[node].iter() { - self.visit(child, dst, marks); + for child in self.nodes[node].keys() { + self.sort_inner_visit(child, dst, marks); } dst.push(node.clone()); - marks.insert(node.clone(), Mark::Done); } - pub fn iter(&self) -> Nodes { + pub fn iter(&self) -> impl Iterator { self.nodes.keys() } + + /// Checks if there is a path from `from` to `to`. + pub fn is_path_from_to<'a>(&'a self, from: &'a N, to: &'a N) -> bool { + let mut stack = vec![from]; + let mut seen = BTreeSet::new(); + seen.insert(from); + while let Some(iter) = stack.pop().and_then(|p| self.nodes.get(p)) { + for p in iter.keys() { + if p == to { + return true; + } + if seen.insert(p) { + stack.push(p); + } + } + } + false + } + + /// Resolves one of the paths from the given dependent package down to + /// a leaf. + pub fn path_to_bottom<'a>(&'a self, mut pkg: &'a N) -> Vec<&'a N> { + let mut result = vec![pkg]; + while let Some(p) = self.nodes.get(pkg).and_then(|p| { + p.iter() + // Note that we can have "cycles" introduced through dev-dependency + // edges, so make sure we don't loop infinitely. + .find(|&(node, _)| !result.contains(&node)) + .map(|(ref p, _)| p) + }) { + result.push(p); + pkg = p; + } + result + } + + /// Resolves one of the paths from the given dependent package up to + /// the root. + pub fn path_to_top<'a>(&'a self, mut pkg: &'a N) -> Vec<&'a N> { + // Note that this implementation isn't the most robust per se, we'll + // likely have to tweak this over time. For now though it works for what + // it's used for! + let mut result = vec![pkg]; + let first_pkg_depending_on = |pkg: &N, res: &[&N]| { + self.nodes + .iter() + .filter(|&(_, adjacent)| adjacent.contains_key(pkg)) + // Note that we can have "cycles" introduced through dev-dependency + // edges, so make sure we don't loop infinitely. 
+ .find(|&(node, _)| !res.contains(&node)) + .map(|(ref p, _)| p) + }; + while let Some(p) = first_pkg_depending_on(pkg, &result) { + result.push(p); + pkg = p; + } + result + } } -impl fmt::Debug for Graph { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - try!(writeln!(fmt, "Graph {{")); +impl Default for Graph { + fn default() -> Graph { + Graph::new() + } +} + +impl fmt::Debug for Graph { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + writeln!(fmt, "Graph {{")?; - for (n, e) in self.nodes.iter() { - try!(writeln!(fmt, " - {}", n)); + for (n, e) in &self.nodes { + writeln!(fmt, " - {}", n)?; - for n in e.iter() { - try!(writeln!(fmt, " - {}", n)); + for n in e.keys() { + writeln!(fmt, " - {}", n)?; } } - try!(write!(fmt, "}}")); + write!(fmt, "}}")?; Ok(()) } } -impl PartialEq for Graph { - fn eq(&self, other: &Graph) -> bool { self.nodes.eq(&other.nodes) } +impl PartialEq for Graph { + fn eq(&self, other: &Graph) -> bool { + self.nodes.eq(&other.nodes) + } } -impl Eq for Graph {} +impl Eq for Graph {} -impl Clone for Graph { - fn clone(&self) -> Graph { - Graph { nodes: self.nodes.clone() } +impl Clone for Graph { + fn clone(&self) -> Graph { + Graph { + nodes: self.nodes.clone(), + } } } diff --git a/src/cargo/util/hex.rs b/src/cargo/util/hex.rs index 7530c5472f2..ed60f9b8e5f 100644 --- a/src/cargo/util/hex.rs +++ b/src/cargo/util/hex.rs @@ -1,22 +1,27 @@ -use std::hash::{Hasher, Hash, SipHasher}; +#![allow(deprecated)] -use rustc_serialize::hex::ToHex; +use hex; +use std::hash::{Hash, Hasher, SipHasher}; pub fn to_hex(num: u64) -> String { - [ - (num >> 0) as u8, - (num >> 8) as u8, + hex::encode(&[ + (num >> 0) as u8, + (num >> 8) as u8, (num >> 16) as u8, (num >> 24) as u8, (num >> 32) as u8, (num >> 40) as u8, (num >> 48) as u8, (num >> 56) as u8, - ].to_hex() + ]) } -pub fn short_hash(hashable: &H) -> String { +pub fn hash_u64(hashable: H) -> u64 { let mut hasher = SipHasher::new_with_keys(0, 0); hashable.hash(&mut hasher); - to_hex(hasher.finish()) + hasher.finish() +} + +pub fn short_hash(hashable: &H) -> String { + to_hex(hash_u64(hashable)) } diff --git a/src/cargo/util/important_paths.rs b/src/cargo/util/important_paths.rs index 1d6076b4b87..6476c3e196b 100644 --- a/src/cargo/util/important_paths.rs +++ b/src/cargo/util/important_paths.rs @@ -1,68 +1,32 @@ -use std::env; +use crate::util::errors::CargoResult; +use crate::util::paths; use std::fs; use std::path::{Path, PathBuf}; -use util::{CargoResult, human, ChainError}; -/// Iteratively search for `file` in `pwd` and its parents, returning -/// the path of the directory. -pub fn find_project(pwd: &Path, file: &str) -> CargoResult { - find_project_manifest(pwd, file).map(|mut p| { - // remove the file, leaving just the directory - p.pop(); - p - }) -} - -/// Iteratively search for `file` in `pwd` and its parents, returning -/// the path to the file. -pub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult { - let mut current = pwd; - - loop { +/// Finds the root `Cargo.toml`. 
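For reference, `hash_u64` and `to_hex` compose into the following self-contained sketch of `short_hash`. The fixed SipHash keys keep the value stable across runs, which is what makes it usable in on-disk names.

```rust
#![allow(deprecated)] // `SipHasher` is deprecated in std, as noted above
use std::hash::{Hash, Hasher, SipHasher};

fn short_hash<H: Hash>(hashable: &H) -> String {
    let mut hasher = SipHasher::new_with_keys(0, 0);
    hashable.hash(&mut hasher);
    // Least-significant byte first, matching the shifts in `to_hex` above.
    hasher
        .finish()
        .to_le_bytes()
        .iter()
        .map(|b| format!("{:02x}", b))
        .collect()
}

fn main() {
    println!("{}", short_hash(&"example"));
}
```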
+pub fn find_root_manifest_for_wd(cwd: &Path) -> CargoResult { + let file = "Cargo.toml"; + for current in paths::ancestors(cwd) { let manifest = current.join(file); if fs::metadata(&manifest).is_ok() { - return Ok(manifest) - } - - match current.parent() { - Some(p) => current = p, - None => break, + return Ok(manifest); } } - Err(human(format!("Could not find `{}` in `{}` or any parent directory", - file, pwd.display()))) -} - -/// Find the root Cargo.toml -pub fn find_root_manifest_for_cwd(manifest_path: Option) - -> CargoResult { - let cwd = try!(env::current_dir().chain_error(|| { - human("Couldn't determine the current working directory") - })); - match manifest_path { - Some(path) => { - let absolute_path = cwd.join(&path); - if !absolute_path.ends_with("Cargo.toml") { - return Err(human("the manifest-path must be a path to a Cargo.toml file")) - } - if !fs::metadata(&absolute_path).is_ok() { - return Err(human(format!("manifest path `{}` does not exist", path))) - } - Ok(absolute_path) - }, - None => find_project_manifest(&cwd, "Cargo.toml"), - } + failure::bail!( + "could not find `{}` in `{}` or any parent directory", + file, + cwd.display() + ) } -/// Return the path to the `file` in `pwd`, if it exists. +/// Returns the path to the `file` in `pwd`, if it exists. pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { let manifest = pwd.join(file); - if fs::metadata(&manifest).is_ok() { + if manifest.exists() { Ok(manifest) } else { - Err(human(format!("Could not find `{}` in `{}`", - file, pwd.display()))) + failure::bail!("Could not find `{}` in `{}`", file, pwd.display()) } } diff --git a/src/cargo/util/into_url.rs b/src/cargo/util/into_url.rs new file mode 100644 index 00000000000..f22bae0223d --- /dev/null +++ b/src/cargo/util/into_url.rs @@ -0,0 +1,30 @@ +use std::path::{Path, PathBuf}; + +use url::Url; + +use crate::util::CargoResult; + +/// A type that can be converted to a Url +pub trait IntoUrl { + /// Performs the conversion + fn into_url(self) -> CargoResult; +} + +impl<'a> IntoUrl for &'a str { + fn into_url(self) -> CargoResult { + Url::parse(self).map_err(|s| failure::format_err!("invalid url `{}`: {}", self, s)) + } +} + +impl<'a> IntoUrl for &'a Path { + fn into_url(self) -> CargoResult { + Url::from_file_path(self) + .map_err(|()| failure::format_err!("invalid path url `{}`", self.display())) + } +} + +impl<'a> IntoUrl for &'a PathBuf { + fn into_url(self) -> CargoResult { + self.as_path().into_url() + } +} diff --git a/src/cargo/util/into_url_with_base.rs b/src/cargo/util/into_url_with_base.rs new file mode 100644 index 00000000000..2e869bf05b0 --- /dev/null +++ b/src/cargo/util/into_url_with_base.rs @@ -0,0 +1,50 @@ +use crate::util::{CargoResult, IntoUrl}; + +use url::Url; + +/// A type that can be interpreted as a relative Url and converted to +/// a Url. 
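`find_root_manifest_for_wd` is the classic walk-up-the-tree search. A std-only sketch using `Path::ancestors`, which behaves like the `paths::ancestors` helper minus its `__CARGO_TEST_ROOT` stopping point:

```rust
use std::path::{Path, PathBuf};

fn find_root_manifest(cwd: &Path) -> Option<PathBuf> {
    cwd.ancestors()
        .map(|dir| dir.join("Cargo.toml"))
        .find(|manifest| manifest.exists())
}

fn main() {
    match find_root_manifest(&std::env::current_dir().unwrap()) {
        Some(m) => println!("manifest: {}", m.display()),
        None => eprintln!("could not find `Cargo.toml` in any parent directory"),
    }
}
```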
+pub trait IntoUrlWithBase { + /// Performs the conversion + fn into_url_with_base(self, base: Option) -> CargoResult; +} + +impl<'a> IntoUrlWithBase for &'a str { + fn into_url_with_base(self, base: Option) -> CargoResult { + let base_url = match base { + Some(base) => Some( + base.into_url() + .map_err(|s| failure::format_err!("invalid url `{}`: {}", self, s))?, + ), + None => None, + }; + + Url::options() + .base_url(base_url.as_ref()) + .parse(self) + .map_err(|s| failure::format_err!("invalid url `{}`: {}", self, s)) + } +} + +#[cfg(test)] +mod tests { + use crate::util::IntoUrlWithBase; + + #[test] + fn into_url_with_base() { + assert_eq!( + "rel/path" + .into_url_with_base(Some("file:///abs/path/")) + .unwrap() + .to_string(), + "file:///abs/path/rel/path" + ); + assert_eq!( + "rel/path" + .into_url_with_base(Some("file:///abs/path/popped-file")) + .unwrap() + .to_string(), + "file:///abs/path/rel/path" + ); + } +} diff --git a/src/cargo/util/job.rs b/src/cargo/util/job.rs new file mode 100644 index 00000000000..27b3b953f1a --- /dev/null +++ b/src/cargo/util/job.rs @@ -0,0 +1,142 @@ +//! Job management (mostly for windows) +//! +//! Most of the time when you're running cargo you expect Ctrl-C to actually +//! terminate the entire tree of processes in play, not just the one at the top +//! (cago). This currently works "by default" on Unix platforms because Ctrl-C +//! actually sends a signal to the *process group* rather than the parent +//! process, so everything will get torn down. On Windows, however, this does +//! not happen and Ctrl-C just kills cargo. +//! +//! To achieve the same semantics on Windows we use Job Objects to ensure that +//! all processes die at the same time. Job objects have a mode of operation +//! where when all handles to the object are closed it causes all child +//! processes associated with the object to be terminated immediately. +//! Conveniently whenever a process in the job object spawns a new process the +//! child will be associated with the job object as well. This means if we add +//! ourselves to the job object we create then everything will get torn down! + +pub use self::imp::Setup; + +pub fn setup() -> Option { + unsafe { imp::setup() } +} + +#[cfg(unix)] +mod imp { + use libc; + use std::env; + + pub type Setup = (); + + pub unsafe fn setup() -> Option<()> { + // There's a test case for the behavior of + // when-cargo-is-killed-subprocesses-are-also-killed, but that requires + // one cargo spawned to become its own session leader, so we do that + // here. + if env::var("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE").is_ok() { + libc::setsid(); + } + Some(()) + } +} + +#[cfg(windows)] +mod imp { + use std::io; + use std::mem; + use std::ptr; + + use log::info; + + use winapi::shared::minwindef::*; + use winapi::um::handleapi::*; + use winapi::um::jobapi2::*; + use winapi::um::processthreadsapi::*; + use winapi::um::winnt::HANDLE; + use winapi::um::winnt::*; + + pub struct Setup { + job: Handle, + } + + pub struct Handle { + inner: HANDLE, + } + + fn last_err() -> io::Error { + io::Error::last_os_error() + } + + pub unsafe fn setup() -> Option { + // Creates a new job object for us to use and then adds ourselves to it. + // Note that all errors are basically ignored in this function, + // intentionally. Job objects are "relatively new" in Windows, + // particularly the ability to support nested job objects. Older + // Windows installs don't support this ability. 
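The tests further down exercise the interesting part of `Url::options().base_url(..)`: a base without a trailing `/` has its last segment popped before the relative reference is joined. A standalone example of the same resolution with the `url` crate:

```rust
use url::Url;

fn main() {
    let base = Url::parse("file:///abs/path/popped-file").unwrap();
    let joined = Url::options()
        .base_url(Some(&base))
        .parse("rel/path")
        .unwrap();
    // `popped-file` is dropped, exactly as the second test below asserts.
    assert_eq!(joined.as_str(), "file:///abs/path/rel/path");
}
```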
We probably don't want + // to force Cargo to abort in this situation or force others to *not* + // use job objects, so we instead just ignore errors and assume that + // we're otherwise part of someone else's job object in this case. + + let job = CreateJobObjectW(ptr::null_mut(), ptr::null()); + if job.is_null() { + return None; + } + let job = Handle { inner: job }; + + // Indicate that when all handles to the job object are gone that all + // process in the object should be killed. Note that this includes our + // entire process tree by default because we've added ourselves and + // our children will reside in the job once we spawn a process. + let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; + info = mem::zeroed(); + info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; + let r = SetInformationJobObject( + job.inner, + JobObjectExtendedLimitInformation, + &mut info as *mut _ as LPVOID, + mem::size_of_val(&info) as DWORD, + ); + if r == 0 { + return None; + } + + // Assign our process to this job object, meaning that our children will + // now live or die based on our existence. + let me = GetCurrentProcess(); + let r = AssignProcessToJobObject(job.inner, me); + if r == 0 { + return None; + } + + Some(Setup { job }) + } + + impl Drop for Setup { + fn drop(&mut self) { + // On normal exits (not ctrl-c), we don't want to kill any child + // processes. The destructor here configures our job object to + // **not** kill everything on close, then closes the job object. + unsafe { + let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; + info = mem::zeroed(); + let r = SetInformationJobObject( + self.job.inner, + JobObjectExtendedLimitInformation, + &mut info as *mut _ as LPVOID, + mem::size_of_val(&info) as DWORD, + ); + if r == 0 { + info!("failed to configure job object to defaults: {}", last_err()); + } + } + } + } + + impl Drop for Handle { + fn drop(&mut self) { + unsafe { + CloseHandle(self.inner); + } + } + } +} diff --git a/src/cargo/util/lev_distance.rs b/src/cargo/util/lev_distance.rs index 4ae55a9ebc7..db78e13b641 100644 --- a/src/cargo/util/lev_distance.rs +++ b/src/cargo/util/lev_distance.rs @@ -1,29 +1,21 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - use std::cmp; pub fn lev_distance(me: &str, t: &str) -> usize { - if me.is_empty() { return t.chars().count(); } - if t.is_empty() { return me.chars().count(); } + if me.is_empty() { + return t.chars().count(); + } + if t.is_empty() { + return me.chars().count(); + } - let mut dcol = (0..t.len() + 1).collect::>(); + let mut dcol = (0..=t.len()).collect::>(); let mut t_last = 0; for (i, sc) in me.chars().enumerate() { - let mut current = i; dcol[0] = current + 1; for (j, tc) in t.chars().enumerate() { - let next = dcol[j + 1]; if sc == tc { @@ -41,13 +33,42 @@ pub fn lev_distance(me: &str, t: &str) -> usize { dcol[t_last + 1] } +/// Find the closest element from `iter` matching `choice`. The `key` callback +/// is used to select a `&str` from the iterator to compare against `choice`. 
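Usage-wise, the returned `Setup` is an RAII guard. A sketch of how a caller is expected to hold it, assuming the crate-internal path `cargo::util::job` added in this diff:

```rust
fn main() {
    // Keep the guard alive for the whole run: dropping it resets the job
    // object so that a *normal* exit doesn't tear down child processes.
    let _job_guard = cargo::util::job::setup();

    // ... spawn rustc and friends; on Ctrl-C the whole tree dies together ...
}
```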
+pub fn closest<'a, T>( + choice: &str, + iter: impl Iterator, + key: impl Fn(&T) -> &'a str, +) -> Option { + // Only consider candidates with a lev_distance of 3 or less so we don't + // suggest out-of-the-blue options. + iter.map(|e| (lev_distance(choice, key(&e)), e)) + .filter(|&(d, _)| d < 4) + .min_by_key(|t| t.0) + .map(|t| t.1) +} + +/// Version of `closest` that returns a common "suggestion" that can be tacked +/// onto the end of an error message. +pub fn closest_msg<'a, T>( + choice: &str, + iter: impl Iterator, + key: impl Fn(&T) -> &'a str, +) -> String { + match closest(choice, iter, &key) { + Some(e) => format!("\n\n\tDid you mean `{}`?", key(&e)), + None => String::new(), + } +} + #[test] fn test_lev_distance() { - use std::char::{ from_u32, MAX }; + use std::char::{from_u32, MAX}; // Test bytelength agnosticity for c in (0u32..MAX as u32) - .filter_map(|i| from_u32(i)) - .map(|i| i.to_string()) { + .filter_map(from_u32) + .map(|i| i.to_string()) + { assert_eq!(lev_distance(&c, &c), 0); } diff --git a/src/cargo/util/lockserver.rs b/src/cargo/util/lockserver.rs new file mode 100644 index 00000000000..9c4878dfc14 --- /dev/null +++ b/src/cargo/util/lockserver.rs @@ -0,0 +1,171 @@ +//! An implementation of IPC locks, guaranteed to be released if a process dies +//! +//! This module implements a locking server/client where the main `cargo fix` +//! process will start up a server and then all the client processes will +//! connect to it. The main purpose of this file is to enusre that each crate +//! (aka file entry point) is only fixed by one process at a time, currently +//! concurrent fixes can't happen. +//! +//! The basic design here is to use a TCP server which is pretty portable across +//! platforms. For simplicity it just uses threads as well. Clients connect to +//! the main server, inform the server what its name is, and then wait for the +//! server to give it the lock (aka write a byte). + +use std::collections::HashMap; +use std::io::{BufRead, BufReader, Read, Write}; +use std::net::{SocketAddr, TcpListener, TcpStream}; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::{Arc, Mutex}; +use std::thread::{self, JoinHandle}; + +use failure::{Error, ResultExt}; + +pub struct LockServer { + listener: TcpListener, + addr: SocketAddr, + threads: HashMap, + done: Arc, +} + +pub struct LockServerStarted { + done: Arc, + addr: SocketAddr, + thread: Option>, +} + +pub struct LockServerClient { + _socket: TcpStream, +} + +struct ServerClient { + thread: Option>, + lock: Arc)>>, +} + +impl LockServer { + pub fn new() -> Result { + let listener = TcpListener::bind("127.0.0.1:0") + .with_context(|_| "failed to bind TCP listener to manage locking")?; + let addr = listener.local_addr()?; + Ok(LockServer { + listener, + addr, + threads: HashMap::new(), + done: Arc::new(AtomicBool::new(false)), + }) + } + + pub fn addr(&self) -> &SocketAddr { + &self.addr + } + + pub fn start(self) -> Result { + let addr = self.addr; + let done = self.done.clone(); + let thread = thread::spawn(|| { + self.run(); + }); + Ok(LockServerStarted { + addr, + thread: Some(thread), + done, + }) + } + + fn run(mut self) { + while let Ok((client, _)) = self.listener.accept() { + if self.done.load(Ordering::SeqCst) { + break; + } + + // Learn the name of our connected client to figure out if it needs + // to wait for another process to release the lock. 
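A usage sketch for `closest_msg` above; the subcommand list is invented, and the flat import path follows the `util/mod.rs` re-exports later in this diff.

```rust
use cargo::util::closest_msg; // re-exported from `lev_distance`

fn main() {
    let subcommands = ["build", "bench", "test", "doc"];
    let msg = closest_msg("biuld", subcommands.iter().copied(), |s| *s);
    // "build" is two edits away, inside the cutoff of 3:
    assert_eq!(msg, "\n\n\tDid you mean `build`?");
    eprintln!("error: no such subcommand: `biuld`{}", msg);
}
```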
+ let mut client = BufReader::new(client); + let mut name = String::new(); + if client.read_line(&mut name).is_err() { + continue; + } + let client = client.into_inner(); + + // If this "named mutex" is already registered and the thread is + // still going, put it on the queue. Otherwise wait on the previous + // thread and we'll replace it just below. + if let Some(t) = self.threads.get_mut(&name) { + let mut state = t.lock.lock().unwrap(); + if state.0 { + state.1.push(client); + continue; + } + drop(t.thread.take().unwrap().join()); + } + + let lock = Arc::new(Mutex::new((true, vec![client]))); + let lock2 = lock.clone(); + let thread = thread::spawn(move || { + loop { + let mut client = { + let mut state = lock2.lock().unwrap(); + if state.1.is_empty() { + state.0 = false; + break; + } else { + state.1.remove(0) + } + }; + // Inform this client that it now has the lock and wait for + // it to disconnect by waiting for EOF. + if client.write_all(&[1]).is_err() { + continue; + } + let mut dst = Vec::new(); + drop(client.read_to_end(&mut dst)); + } + }); + + self.threads.insert( + name, + ServerClient { + thread: Some(thread), + lock, + }, + ); + } + } +} + +impl Drop for LockServer { + fn drop(&mut self) { + for (_, mut client) in self.threads.drain() { + if let Some(thread) = client.thread.take() { + drop(thread.join()); + } + } + } +} + +impl Drop for LockServerStarted { + fn drop(&mut self) { + self.done.store(true, Ordering::SeqCst); + // Ignore errors here as this is largely best-effort + if TcpStream::connect(&self.addr).is_err() { + return; + } + drop(self.thread.take().unwrap().join()); + } +} + +impl LockServerClient { + pub fn lock(addr: &SocketAddr, name: impl AsRef<[u8]>) -> Result { + let mut client = TcpStream::connect(&addr) + .with_context(|_| "failed to connect to parent lock server")?; + client + .write_all(name.as_ref()) + .and_then(|_| client.write_all(b"\n")) + .with_context(|_| "failed to write to lock server")?; + let mut buf = [0]; + client + .read_exact(&mut buf) + .with_context(|_| "failed to acquire lock")?; + Ok(LockServerClient { _socket: client }) + } +} diff --git a/src/cargo/util/machine_message.rs b/src/cargo/util/machine_message.rs new file mode 100644 index 00000000000..b52e3b4302e --- /dev/null +++ b/src/cargo/util/machine_message.rs @@ -0,0 +1,75 @@ +use std::path::PathBuf; + +use serde::ser; +use serde::Serialize; +use serde_json::{self, json, value::RawValue}; + +use crate::core::{PackageId, Target}; + +pub trait Message: ser::Serialize { + fn reason(&self) -> &str; + + fn to_json_string(&self) -> String { + let json = serde_json::to_string(self).unwrap(); + assert!(json.starts_with("{\"")); + let reason = json!(self.reason()); + format!("{{\"reason\":{},{}", reason, &json[1..]) + } +} + +#[derive(Serialize)] +pub struct FromCompiler<'a> { + pub package_id: PackageId, + pub target: &'a Target, + pub message: Box, +} + +impl<'a> Message for FromCompiler<'a> { + fn reason(&self) -> &str { + "compiler-message" + } +} + +#[derive(Serialize)] +pub struct Artifact<'a> { + pub package_id: PackageId, + pub target: &'a Target, + pub profile: ArtifactProfile, + pub features: Vec, + pub filenames: Vec, + pub executable: Option, + pub fresh: bool, +} + +impl<'a> Message for Artifact<'a> { + fn reason(&self) -> &str { + "compiler-artifact" + } +} + +/// This is different from the regular `Profile` to maintain backwards +/// compatibility (in particular, `test` is no longer in `Profile`, but we +/// still want it to be included here). 
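The wire protocol is small enough to restate as a std-only client sketch: send the name and a newline, block until the server writes its one-byte grant, then hold the socket open for as long as the lock is needed (the server interprets EOF as release).

```rust
use std::io::{Read, Write};
use std::net::{SocketAddr, TcpStream};

fn acquire_lock(addr: &SocketAddr, name: &str) -> std::io::Result<TcpStream> {
    let mut socket = TcpStream::connect(addr)?;
    socket.write_all(name.as_bytes())?;
    socket.write_all(b"\n")?;
    let mut buf = [0u8; 1];
    socket.read_exact(&mut buf)?; // blocks until the server says "go"
    Ok(socket) // keep this alive while the lock is held
}
```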
+#[derive(Serialize)] +pub struct ArtifactProfile { + pub opt_level: &'static str, + pub debuginfo: Option, + pub debug_assertions: bool, + pub overflow_checks: bool, + pub test: bool, +} + +#[derive(Serialize)] +pub struct BuildScript<'a> { + pub package_id: PackageId, + pub linked_libs: &'a [String], + pub linked_paths: &'a [String], + pub cfgs: &'a [String], + pub env: &'a [(String, String)], +} + +impl<'a> Message for BuildScript<'a> { + fn reason(&self) -> &str { + "build-script-executed" + } +} diff --git a/src/cargo/util/mod.rs b/src/cargo/util/mod.rs index b47e8095dd4..424b525f4cc 100644 --- a/src/cargo/util/mod.rs +++ b/src/cargo/util/mod.rs @@ -1,36 +1,89 @@ -pub use self::config::Config; -pub use self::dependency_queue::Dependency; -pub use self::dependency_queue::{DependencyQueue, Fresh, Dirty, Freshness}; -pub use self::errors::{CargoResult, CargoError, ChainError, CliResult}; -pub use self::errors::{CliError, ProcessError}; -pub use self::errors::{Human, caused_human}; -pub use self::errors::{process_error, internal_error, internal, human}; +use std::time::Duration; + +pub use self::cfg::{Cfg, CfgExpr, Platform}; +pub use self::config::{homedir, Config, ConfigValue}; +pub use self::dependency_queue::DependencyQueue; +pub use self::diagnostic_server::RustfixDiagnosticServer; +pub use self::errors::{internal, process_error}; +pub use self::errors::{CargoResult, CargoResultExt, CliResult, Test}; +pub use self::errors::{CargoTestError, CliError, ProcessError}; +pub use self::flock::{FileLock, Filesystem}; pub use self::graph::Graph; -pub use self::hex::{to_hex, short_hash}; -pub use self::lev_distance::{lev_distance}; -pub use self::paths::{join_paths, path2bytes, bytes2path, dylib_path}; -pub use self::paths::{normalize_path, dylib_path_envvar, without_prefix}; +pub use self::hex::{hash_u64, short_hash, to_hex}; +pub use self::into_url::IntoUrl; +pub use self::into_url_with_base::IntoUrlWithBase; +pub use self::lev_distance::{closest, closest_msg, lev_distance}; +pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted}; +pub use self::paths::{bytes2path, dylib_path, join_paths, path2bytes}; +pub use self::paths::{dylib_path_envvar, normalize_path}; pub use self::process_builder::{process, ProcessBuilder}; +pub use self::progress::{Progress, ProgressStyle}; +pub use self::read2::read2; pub use self::rustc::Rustc; pub use self::sha256::Sha256; pub use self::to_semver::ToSemver; -pub use self::to_url::ToUrl; -pub use self::vcs::{GitRepo, HgRepo}; +pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; +pub use self::workspace::{ + print_available_benches, print_available_binaries, print_available_examples, + print_available_tests, +}; +mod cfg; +pub mod command_prelude; pub mod config; +mod dependency_queue; +pub mod diagnostic_server; pub mod errors; +mod flock; pub mod graph; pub mod hex; pub mod important_paths; +pub mod into_url; +mod into_url_with_base; +pub mod job; +pub mod lev_distance; +mod lockserver; +pub mod machine_message; +pub mod network; pub mod paths; pub mod process_builder; pub mod profile; +mod progress; +mod read2; +pub mod rustc; +mod sha256; pub mod to_semver; -pub mod to_url; pub mod toml; -pub mod lev_distance; -mod dependency_queue; -mod sha256; -mod shell_escape; mod vcs; -mod rustc; +mod workspace; + +pub fn elapsed(duration: Duration) -> String { + let secs = duration.as_secs(); + + if secs >= 60 { + format!("{}m {:02}s", secs / 60, secs % 60) + } else { + format!("{}.{:02}s", secs, duration.subsec_nanos() / 
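The `to_json_string` splice relies on `serde_json` always emitting a non-empty struct as `{"...`, with no leading whitespace. A standalone demonstration with a hypothetical two-field message:

```rust
use serde::Serialize;
use serde_json::json;

#[derive(Serialize)]
struct Example {
    package_id: String,
    fresh: bool,
}

fn main() {
    let msg = Example { package_id: "foo 0.1.0".to_string(), fresh: true };
    let json = serde_json::to_string(&msg).unwrap();
    // Slice off the `{` and prepend the reason field: still valid JSON.
    assert!(json.starts_with("{\""));
    let line = format!("{{\"reason\":{},{}", json!("compiler-artifact"), &json[1..]);
    assert_eq!(
        line,
        r#"{"reason":"compiler-artifact","package_id":"foo 0.1.0","fresh":true}"#
    );
}
```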
10_000_000) + } +} + +/// Check the base requirements for a package name. +/// +/// This can be used for other things than package names, to enforce some +/// level of sanity. Note that package names have other restrictions +/// elsewhere. `cargo new` has a few restrictions, such as checking for +/// reserved names. crates.io has even more restrictions. +pub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> { + if let Some(ch) = name + .chars() + .find(|ch| !ch.is_alphanumeric() && *ch != '_' && *ch != '-') + { + failure::bail!("Invalid character `{}` in {}: `{}`{}", ch, what, name, help); + } + Ok(()) +} + +/// Whether or not this running in a Continuous Integration environment. +pub fn is_ci() -> bool { + std::env::var("CI").is_ok() || std::env::var("TF_BUILD").is_ok() +} diff --git a/src/cargo/util/network.rs b/src/cargo/util/network.rs new file mode 100644 index 00000000000..2873dea253b --- /dev/null +++ b/src/cargo/util/network.rs @@ -0,0 +1,135 @@ +use curl; +use git2; + +use failure::Error; + +use crate::util::errors::{CargoResult, HttpNot200}; +use crate::util::Config; + +pub struct Retry<'a> { + config: &'a Config, + remaining: u32, +} + +impl<'a> Retry<'a> { + pub fn new(config: &'a Config) -> CargoResult> { + Ok(Retry { + config, + remaining: config.get::>("net.retry")?.unwrap_or(2), + }) + } + + pub fn r#try(&mut self, f: impl FnOnce() -> CargoResult) -> CargoResult> { + match f() { + Err(ref e) if maybe_spurious(e) && self.remaining > 0 => { + let msg = format!( + "spurious network error ({} tries \ + remaining): {}", + self.remaining, e + ); + self.config.shell().warn(msg)?; + self.remaining -= 1; + Ok(None) + } + other => other.map(Some), + } + } +} + +fn maybe_spurious(err: &Error) -> bool { + for e in err.iter_chain() { + if let Some(git_err) = e.downcast_ref::() { + match git_err.class() { + git2::ErrorClass::Net | git2::ErrorClass::Os => return true, + _ => (), + } + } + if let Some(curl_err) = e.downcast_ref::() { + if curl_err.is_couldnt_connect() + || curl_err.is_couldnt_resolve_proxy() + || curl_err.is_couldnt_resolve_host() + || curl_err.is_operation_timedout() + || curl_err.is_recv_error() + || curl_err.is_http2_stream_error() + { + return true; + } + } + if let Some(not_200) = e.downcast_ref::() { + if 500 <= not_200.code && not_200.code < 600 { + return true; + } + } + } + false +} + +/// Wrapper method for network call retry logic. +/// +/// Retry counts provided by Config object `net.retry`. Config shell outputs +/// a warning on per retry. +/// +/// Closure must return a `CargoResult`. +/// +/// # Examples +/// +/// ```ignore +/// use util::network; +/// cargo_result = network::with_retry(&config, || something.download()); +/// ``` +pub fn with_retry(config: &Config, mut callback: F) -> CargoResult +where + F: FnMut() -> CargoResult, +{ + let mut retry = Retry::new(config)?; + loop { + if let Some(ret) = retry.r#try(&mut callback)? 
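Since the diff line break splits `elapsed` awkwardly, here it is restated whole as a runnable sketch; the fractional part is hundredths of a second (`subsec_nanos() / 10_000_000`).

```rust
use std::time::Duration;

fn elapsed(duration: Duration) -> String {
    let secs = duration.as_secs();
    if secs >= 60 {
        format!("{}m {:02}s", secs / 60, secs % 60)
    } else {
        format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000)
    }
}

fn main() {
    assert_eq!(elapsed(Duration::new(3, 250_000_000)), "3.25s");
    assert_eq!(elapsed(Duration::new(95, 0)), "1m 35s");
}
```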
{ + return Ok(ret); + } + } +} +#[test] +fn with_retry_repeats_the_call_then_works() { + //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry + let error1 = HttpNot200 { + code: 501, + url: "Uri".to_string(), + } + .into(); + let error2 = HttpNot200 { + code: 502, + url: "Uri".to_string(), + } + .into(); + let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; + let config = Config::default().unwrap(); + let result = with_retry(&config, || results.pop().unwrap()); + assert_eq!(result.unwrap(), ()) +} + +#[test] +fn with_retry_finds_nested_spurious_errors() { + //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry + //String error messages are not considered spurious + let error1 = failure::Error::from(HttpNot200 { + code: 501, + url: "Uri".to_string(), + }); + let error1 = failure::Error::from(error1.context("A non-spurious wrapping err")); + let error2 = failure::Error::from(HttpNot200 { + code: 502, + url: "Uri".to_string(), + }); + let error2 = failure::Error::from(error2.context("A second chained error")); + let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; + let config = Config::default().unwrap(); + let result = with_retry(&config, || results.pop().unwrap()); + assert_eq!(result.unwrap(), ()) +} + +#[test] +fn curle_http2_stream_is_spurious() { + let code = curl_sys::CURLE_HTTP2_STREAM; + let err = curl::Error::new(code); + assert!(maybe_spurious(&err.into())); +} diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index 7cc7d619e83..8556450ece6 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -1,24 +1,58 @@ use std::env; use std::ffi::{OsStr, OsString}; -use std::path::{Path, PathBuf, Component}; +use std::fs::{self, File, OpenOptions}; +use std::io; +use std::io::prelude::*; +use std::iter; +use std::path::{Component, Path, PathBuf}; -use util::{human, internal, CargoResult, ChainError}; +use filetime::FileTime; + +use crate::util::errors::{CargoResult, CargoResultExt, Internal}; pub fn join_paths>(paths: &[T], env: &str) -> CargoResult { - env::join_paths(paths.iter()).or_else(|e| { - let paths = paths.iter().map(Path::new).collect::>(); - internal(format!("failed to join path array: {:?}", paths)).chain_error(|| { - human(format!("failed to join search paths together: {}\n\ - Does ${} have an unterminated quote character?", - e, env)) - }) - }) + let err = match env::join_paths(paths.iter()) { + Ok(paths) => return Ok(paths), + Err(e) => e, + }; + let paths = paths.iter().map(Path::new).collect::>(); + let err = failure::Error::from(err); + let explain = Internal::new(failure::format_err!( + "failed to join path array: {:?}", + paths + )); + let err = failure::Error::from(err.context(explain)); + let more_explain = format!( + "failed to join search paths together\n\ + Does ${} have an unterminated quote character?", + env + ); + Err(err.context(more_explain).into()) } pub fn dylib_path_envvar() -> &'static str { - if cfg!(windows) {"PATH"} - else if cfg!(target_os = "macos") {"DYLD_LIBRARY_PATH"} - else {"LD_LIBRARY_PATH"} + if cfg!(windows) { + "PATH" + } else if cfg!(target_os = "macos") { + // When loading and linking a dynamic library or bundle, dlopen + // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and + // DYLD_FALLBACK_LIBRARY_PATH. + // In the Mach-O format, a dynamic library has an "install path." + // Clients linking against the library record this path, and the + // dynamic linker, dyld, uses it to locate the library. 
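Stripped of `Config` and `failure`, the `Retry`/`with_retry` control flow reduces to the generic sketch below; the names here are illustrative, not Cargo's API.

```rust
fn with_retry<T, E>(
    mut remaining: u32,
    is_spurious: impl Fn(&E) -> bool,
    mut callback: impl FnMut() -> Result<T, E>,
) -> Result<T, E> {
    loop {
        match callback() {
            Err(e) if is_spurious(&e) && remaining > 0 => {
                // The real version warns via `config.shell().warn(..)`.
                eprintln!("warning: spurious error ({} tries remaining)", remaining);
                remaining -= 1;
            }
            other => return other,
        }
    }
}

fn main() {
    // Attempts are popped from the back, as in the tests above.
    let mut attempts: Vec<Result<i32, &str>> = vec![Ok(42), Err("timeout"), Err("reset")];
    assert_eq!(with_retry(2, |_| true, || attempts.pop().unwrap()), Ok(42));
}
```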
+ // dyld searches DYLD_LIBRARY_PATH *before* the install path. + // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot + // find the library in the install path. + // Setting DYLD_LIBRARY_PATH can easily have unintended + // consequences. + // + // Also, DYLD_LIBRARY_PATH appears to have significant performance + // penalty starting in 10.13. Cargo's testsuite ran more than twice as + // slow with it on CI. + "DYLD_FALLBACK_LIBRARY_PATH" + } else { + "LD_LIBRARY_PATH" + } } pub fn dylib_path() -> Vec { @@ -30,8 +64,7 @@ pub fn dylib_path() -> Vec { pub fn normalize_path(path: &Path) -> PathBuf { let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek() - .cloned() { + let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { components.next(); PathBuf::from(c.as_os_str()) } else { @@ -41,29 +74,132 @@ pub fn normalize_path(path: &Path) -> PathBuf { for component in components { match component { Component::Prefix(..) => unreachable!(), - Component::RootDir => { ret.push(component.as_os_str()); } + Component::RootDir => { + ret.push(component.as_os_str()); + } Component::CurDir => {} - Component::ParentDir => { ret.pop(); } - Component::Normal(c) => { ret.push(c); } + Component::ParentDir => { + ret.pop(); + } + Component::Normal(c) => { + ret.push(c); + } } } ret } -pub fn without_prefix<'a>(a: &'a Path, b: &'a Path) -> Option<&'a Path> { - let mut a = a.components(); - let mut b = b.components(); - loop { - match b.next() { - Some(y) => match a.next() { - Some(x) if x == y => continue, - _ => return None, - }, - None => return Some(a.as_path()), +pub fn resolve_executable(exec: &Path) -> CargoResult { + if exec.components().count() == 1 { + let paths = env::var_os("PATH").ok_or_else(|| failure::format_err!("no PATH"))?; + let candidates = env::split_paths(&paths).flat_map(|path| { + let candidate = path.join(&exec); + let with_exe = if env::consts::EXE_EXTENSION == "" { + None + } else { + Some(candidate.with_extension(env::consts::EXE_EXTENSION)) + }; + iter::once(candidate).chain(with_exe) + }); + for candidate in candidates { + if candidate.is_file() { + // PATH may have a component like "." in it, so we still need to + // canonicalize. + return Ok(candidate.canonicalize()?); + } } + + failure::bail!("no executable for `{}` found in PATH", exec.display()) + } else { + Ok(exec.canonicalize()?) + } +} + +pub fn read(path: &Path) -> CargoResult { + match String::from_utf8(read_bytes(path)?) 
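`normalize_path` is purely lexical, which is both its point and its caveat: it works on paths that do not exist, but unlike `Path::canonicalize` it can disagree with the kernel when symlinks are involved. A usage sketch, assuming the helper above is in scope:

```rust
use std::path::Path;

fn main() {
    // `..` pops, `.` is dropped; no filesystem access happens.
    let p = normalize_path(Path::new("/a/b/../c/./d"));
    assert_eq!(p, Path::new("/a/c/d"));
}
```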
{ + Ok(s) => Ok(s), + Err(_) => failure::bail!("path at `{}` was not valid utf-8", path.display()), } } +pub fn read_bytes(path: &Path) -> CargoResult> { + let res = (|| -> CargoResult<_> { + let mut ret = Vec::new(); + let mut f = File::open(path)?; + if let Ok(m) = f.metadata() { + ret.reserve(m.len() as usize + 1); + } + f.read_to_end(&mut ret)?; + Ok(ret) + })() + .chain_err(|| format!("failed to read `{}`", path.display()))?; + Ok(res) +} + +pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> { + (|| -> CargoResult<()> { + let mut f = File::create(path)?; + f.write_all(contents)?; + Ok(()) + })() + .chain_err(|| format!("failed to write `{}`", path.display()))?; + Ok(()) +} + +pub fn write_if_changed, C: AsRef<[u8]>>(path: P, contents: C) -> CargoResult<()> { + (|| -> CargoResult<()> { + let contents = contents.as_ref(); + let mut f = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .open(&path)?; + let mut orig = Vec::new(); + f.read_to_end(&mut orig)?; + if orig != contents { + f.set_len(0)?; + f.seek(io::SeekFrom::Start(0))?; + f.write_all(contents)?; + } + Ok(()) + })() + .chain_err(|| format!("failed to write `{}`", path.as_ref().display()))?; + Ok(()) +} + +pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> { + (|| -> CargoResult<()> { + let mut f = OpenOptions::new() + .write(true) + .append(true) + .create(true) + .open(path)?; + + f.write_all(contents)?; + Ok(()) + })() + .chain_err(|| format!("failed to write `{}`", path.display()))?; + Ok(()) +} + +pub fn mtime(path: &Path) -> CargoResult { + let meta = fs::metadata(path).chain_err(|| format!("failed to stat `{}`", path.display()))?; + Ok(FileTime::from_last_modification_time(&meta)) +} + +/// Record the current time on the filesystem (using the filesystem's clock) +/// using a file at the given directory. Returns the current time. +pub fn set_invocation_time(path: &Path) -> CargoResult { + // note that if `FileTime::from_system_time(SystemTime::now());` is determined to be sufficient, + // then this can be removed. + let timestamp = path.join("invoked.timestamp"); + write( + ×tamp, + b"This file has an mtime of when this was started.", + )?; + mtime(×tamp) +} + #[cfg(unix)] pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { use std::os::unix::prelude::*; @@ -73,15 +209,16 @@ pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { pub fn path2bytes(path: &Path) -> CargoResult<&[u8]> { match path.as_os_str().to_str() { Some(s) => Ok(s.as_bytes()), - None => Err(human(format!("invalid non-unicode path: {}", - path.display()))) + None => Err(failure::format_err!( + "invalid non-unicode path: {}", + path.display() + )), } } #[cfg(unix)] pub fn bytes2path(bytes: &[u8]) -> CargoResult { use std::os::unix::prelude::*; - use std::ffi::OsStr; Ok(PathBuf::from(OsStr::from_bytes(bytes))) } #[cfg(windows)] @@ -89,6 +226,108 @@ pub fn bytes2path(bytes: &[u8]) -> CargoResult { use std::str; match str::from_utf8(bytes) { Ok(s) => Ok(PathBuf::from(s)), - Err(..) => Err(human("invalid non-unicode path")), + Err(..) => Err(failure::format_err!("invalid non-unicode path")), + } +} + +pub fn ancestors(path: &Path) -> PathAncestors<'_> { + PathAncestors::new(path) +} + +pub struct PathAncestors<'a> { + current: Option<&'a Path>, + stop_at: Option, +} + +impl<'a> PathAncestors<'a> { + fn new(path: &Path) -> PathAncestors<'_> { + PathAncestors { + current: Some(path), + //HACK: avoid reading `~/.cargo/config` when testing Cargo itself. 
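A usage sketch showing why `write_if_changed` exists: skipping the write preserves the file's mtime, so mtime-based freshness checks built on `mtime` see no change. The helper names assume the functions above are in scope; the path is invented.

```rust
use std::path::Path;

fn main() {
    let stamp = Path::new("example.stamp");
    write_if_changed(stamp, b"same contents").unwrap();
    let before = mtime(stamp).unwrap();
    write_if_changed(stamp, b"same contents").unwrap(); // contents match: no write
    assert_eq!(before, mtime(stamp).unwrap());
}
```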
+ stop_at: env::var("__CARGO_TEST_ROOT").ok().map(PathBuf::from), + } + } +} + +impl<'a> Iterator for PathAncestors<'a> { + type Item = &'a Path; + + fn next(&mut self) -> Option<&'a Path> { + if let Some(path) = self.current { + self.current = path.parent(); + + if let Some(ref stop_at) = self.stop_at { + if path == stop_at { + self.current = None; + } + } + + Some(path) + } else { + None + } + } +} + +pub fn remove_dir_all>(p: P) -> CargoResult<()> { + _remove_dir_all(p.as_ref()) +} + +fn _remove_dir_all(p: &Path) -> CargoResult<()> { + if p.symlink_metadata()?.file_type().is_symlink() { + return remove_file(p); + } + let entries = p + .read_dir() + .chain_err(|| format!("failed to read directory `{}`", p.display()))?; + for entry in entries { + let entry = entry?; + let path = entry.path(); + if entry.file_type()?.is_dir() { + remove_dir_all(&path)?; + } else { + remove_file(&path)?; + } + } + remove_dir(&p) +} + +pub fn remove_dir>(p: P) -> CargoResult<()> { + _remove_dir(p.as_ref()) +} + +fn _remove_dir(p: &Path) -> CargoResult<()> { + fs::remove_dir(p).chain_err(|| format!("failed to remove directory `{}`", p.display()))?; + Ok(()) +} + +pub fn remove_file>(p: P) -> CargoResult<()> { + _remove_file(p.as_ref()) +} + +fn _remove_file(p: &Path) -> CargoResult<()> { + let mut err = match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => e, + }; + + if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) { + match fs::remove_file(p) { + Ok(()) => return Ok(()), + Err(e) => err = e, + } + } + + Err(err).chain_err(|| format!("failed to remove file `{}`", p.display()))?; + Ok(()) +} + +fn set_not_readonly(p: &Path) -> io::Result { + let mut perms = p.metadata()?.permissions(); + if !perms.readonly() { + return Ok(false); } + perms.set_readonly(false); + fs::set_permissions(p, perms)?; + Ok(true) } diff --git a/src/cargo/util/process_builder.rs b/src/cargo/util/process_builder.rs index 628b681c1f1..9ac8ff1cbcd 100644 --- a/src/cargo/util/process_builder.rs +++ b/src/cargo/util/process_builder.rs @@ -1,27 +1,57 @@ use std::collections::HashMap; use std::env; -use std::ffi::{OsString, OsStr}; +use std::ffi::{OsStr, OsString}; use std::fmt; use std::path::Path; -use std::process::{Command, Output}; +use std::process::{Command, Output, Stdio}; -use util::{CargoResult, ProcessError, process_error}; -use util::shell_escape::escape; +use failure::Fail; +use jobserver::Client; +use shell_escape::escape; -#[derive(Clone, PartialEq, Debug)] +use crate::util::{process_error, read2, CargoResult, CargoResultExt}; + +/// A builder object for an external process, similar to `std::process::Command`. +#[derive(Clone, Debug)] pub struct ProcessBuilder { + /// The program to execute. program: OsString, + /// A list of arguments to pass to the program. args: Vec, + /// Any environment variables that should be set for the program. env: HashMap>, - cwd: OsString, + /// The directory to run the program from. + cwd: Option, + /// The `make` jobserver. See the [jobserver crate][jobserver_docs] for + /// more information. + /// + /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ + jobserver: Option, + /// `true` to include environment variable in display. 
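The `remove_file` fallback above is a self-contained trick worth noting: on Windows, deleting a read-only file fails with `PermissionDenied`, so clear the bit and retry once. A std-only sketch:

```rust
use std::fs;
use std::io;
use std::path::Path;

fn remove_file_lenient(p: &Path) -> io::Result<()> {
    match fs::remove_file(p) {
        Err(e) if e.kind() == io::ErrorKind::PermissionDenied => {
            let mut perms = p.metadata()?.permissions();
            perms.set_readonly(false);
            fs::set_permissions(p, perms)?;
            fs::remove_file(p)
        }
        other => other,
    }
}
```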
+ display_env_vars: bool, } impl fmt::Display for ProcessBuilder { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "`{}", self.program.to_string_lossy())); + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "`")?; - for arg in self.args.iter() { - try!(write!(f, " {}", escape(arg.to_string_lossy()))); + if self.display_env_vars { + for (key, val) in self.env.iter() { + if let Some(val) = val { + let val = escape(val.to_string_lossy()); + if cfg!(windows) { + write!(f, "set {}={}&& ", key, val)?; + } else { + write!(f, "{}={} ", key, val)?; + } + } + } + } + + write!(f, "{}", self.program.to_string_lossy())?; + + for arg in &self.args { + write!(f, " {}", escape(arg.to_string_lossy()))?; } write!(f, "`") @@ -29,111 +59,329 @@ impl fmt::Display for ProcessBuilder { } impl ProcessBuilder { + /// (chainable) Sets the executable for the process. + pub fn program>(&mut self, program: T) -> &mut ProcessBuilder { + self.program = program.as_ref().to_os_string(); + self + } + + /// (chainable) Adds `arg` to the args list. pub fn arg>(&mut self, arg: T) -> &mut ProcessBuilder { self.args.push(arg.as_ref().to_os_string()); self } - pub fn args>(&mut self, arguments: &[T]) -> &mut ProcessBuilder { - self.args.extend(arguments.iter().map(|t| { - t.as_ref().to_os_string() - })); + /// (chainable) Adds multiple `args` to the args list. + pub fn args>(&mut self, args: &[T]) -> &mut ProcessBuilder { + self.args + .extend(args.iter().map(|t| t.as_ref().to_os_string())); + self + } + + /// (chainable) Replaces the args list with the given `args`. + pub fn args_replace>(&mut self, args: &[T]) -> &mut ProcessBuilder { + self.args = args.iter().map(|t| t.as_ref().to_os_string()).collect(); self } + /// (chainable) Sets the current working directory of the process. pub fn cwd>(&mut self, path: T) -> &mut ProcessBuilder { - self.cwd = path.as_ref().to_os_string(); + self.cwd = Some(path.as_ref().to_os_string()); self } - pub fn env>(&mut self, key: &str, - val: T) -> &mut ProcessBuilder { - self.env.insert(key.to_string(), Some(val.as_ref().to_os_string())); + /// (chainable) Sets an environment variable for the process. + pub fn env>(&mut self, key: &str, val: T) -> &mut ProcessBuilder { + self.env + .insert(key.to_string(), Some(val.as_ref().to_os_string())); self } + /// (chainable) Unsets an environment variable for the process. pub fn env_remove(&mut self, key: &str) -> &mut ProcessBuilder { self.env.insert(key.to_string(), None); self } + /// Gets the executable name. + pub fn get_program(&self) -> &OsString { + &self.program + } + + /// Gets the program arguments. pub fn get_args(&self) -> &[OsString] { &self.args } - pub fn get_cwd(&self) -> &Path { Path::new(&self.cwd) } + /// Gets the current working directory for the process. + pub fn get_cwd(&self) -> Option<&Path> { + self.cwd.as_ref().map(Path::new) + } + + /// Gets an environment variable as the process will see it (will inherit from environment + /// unless explicitally unset). pub fn get_env(&self, var: &str) -> Option { - self.env.get(var).cloned().or_else(|| Some(env::var_os(var))) + self.env + .get(var) + .cloned() + .or_else(|| Some(env::var_os(var))) .and_then(|s| s) } - pub fn get_envs(&self) -> &HashMap> { &self.env } + /// Gets all environment variables explicitly set or unset for the process (not inherited + /// vars). + pub fn get_envs(&self) -> &HashMap> { + &self.env + } - pub fn exec(&self) -> Result<(), ProcessError> { + /// Sets the `make` jobserver. 
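A usage sketch of the builder as it now stands; `process` is the helper defined at the bottom of this file, and the working directory and env var are invented.

```rust
use cargo::util::{process, CargoResult};

fn rustc_version() -> CargoResult<String> {
    // `process` now returns the builder directly (no `Result`), and the
    // chainable setters compose naturally.
    let mut cmd = process("rustc");
    cmd.arg("--version").cwd("/tmp").env("RUSTC_LOG", "info");
    let output = cmd.exec_with_output()?;
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}
```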
See the [jobserver crate][jobserver_docs] for + /// more information. + /// + /// [jobserver_docs]: https://docs.rs/jobserver/0.1.6/jobserver/ + pub fn inherit_jobserver(&mut self, jobserver: &Client) -> &mut Self { + self.jobserver = Some(jobserver.clone()); + self + } + + /// Enables environment variable display. + pub fn display_env_vars(&mut self) -> &mut Self { + self.display_env_vars = true; + self + } + + /// Runs the process, waiting for completion, and mapping non-success exit codes to an error. + pub fn exec(&self) -> CargoResult<()> { let mut command = self.build_command(); - let exit = try!(command.status().map_err(|e| { - process_error(&format!("Could not execute process `{}`", - self.debug_string()), - Some(e), None, None) - })); + let exit = command.status().chain_err(|| { + process_error(&format!("could not execute process {}", self), None, None) + })?; if exit.success() { Ok(()) } else { - Err(process_error(&format!("Process didn't exit successfully: `{}`", - self.debug_string()), - None, Some(&exit), None)) + Err(process_error( + &format!("process didn't exit successfully: {}", self), + Some(exit), + None, + ) + .into()) } } - pub fn exec_with_output(&self) -> Result { + /// Replaces the current process with the target process. + /// + /// On Unix, this executes the process using the Unix syscall `execvp`, which will block + /// this process, and will only return if there is an error. + /// + /// On Windows this isn't technically possible. Instead we emulate it to the best of our + /// ability. One aspect we fix here is that we specify a handler for the Ctrl-C handler. + /// In doing so (and by effectively ignoring it) we should emulate proxying Ctrl-C + /// handling to the application at hand, which will either terminate or handle it itself. + /// According to Microsoft's documentation at + /// . + /// the Ctrl-C signal is sent to all processes attached to a terminal, which should + /// include our child process. If the child terminates then we'll reap them in Cargo + /// pretty quickly, and if the child handles the signal then we won't terminate + /// (and we shouldn't!) until the process itself later exits. + pub fn exec_replace(&self) -> CargoResult<()> { + imp::exec_replace(self) + } + + /// Executes the process, returning the stdio output, or an error if non-zero exit status. + pub fn exec_with_output(&self) -> CargoResult { let mut command = self.build_command(); - let output = try!(command.output().map_err(|e| { - process_error(&format!("Could not execute process `{}`", - self.debug_string()), - Some(e), None, None) - })); + let output = command.output().chain_err(|| { + process_error(&format!("could not execute process {}", self), None, None) + })?; if output.status.success() { Ok(output) } else { - Err(process_error(&format!("Process didn't exit successfully: `{}`", - self.debug_string()), - None, Some(&output.status), Some(&output))) + Err(process_error( + &format!("process didn't exit successfully: {}", self), + Some(output.status), + Some(&output), + ) + .into()) } } + /// Executes a command, passing each line of stdout and stderr to the supplied callbacks, which + /// can mutate the string data. + /// + /// If any invocations of these function return an error, it will be propagated. + /// + /// If `capture_output` is true, then all the output will also be buffered + /// and stored in the returned `Output` object. If it is false, no caching + /// is done, and the callbacks are solely responsible for handling the + /// output. 
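The Unix branch of `exec_replace` bottoms out in `std::os::unix::process::CommandExt::exec`, whose signature encodes the semantics described below: it returns only on failure. A minimal standalone sketch:

```rust
#[cfg(unix)]
fn run_and_replace() -> std::io::Error {
    use std::os::unix::process::CommandExt;
    use std::process::Command;

    // On success the current process image is replaced wholesale; only a
    // failed `execvp` hands back an error, hence `-> io::Error` with no `Result`.
    Command::new("ls").arg("-l").exec()
}
```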
+ pub fn exec_with_streaming( + &self, + on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, + on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, + capture_output: bool, + ) -> CargoResult { + let mut stdout = Vec::new(); + let mut stderr = Vec::new(); + + let mut cmd = self.build_command(); + cmd.stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .stdin(Stdio::null()); + + let mut callback_error = None; + let status = (|| { + let mut child = cmd.spawn()?; + let out = child.stdout.take().unwrap(); + let err = child.stderr.take().unwrap(); + read2(out, err, &mut |is_out, data, eof| { + let idx = if eof { + data.len() + } else { + match data.iter().rposition(|b| *b == b'\n') { + Some(i) => i + 1, + None => return, + } + }; + { + // scope for new_lines + let new_lines = if capture_output { + let dst = if is_out { &mut stdout } else { &mut stderr }; + let start = dst.len(); + let data = data.drain(..idx); + dst.extend(data); + &dst[start..] + } else { + &data[..idx] + }; + for line in String::from_utf8_lossy(new_lines).lines() { + if callback_error.is_some() { + break; + } + let callback_result = if is_out { + on_stdout_line(line) + } else { + on_stderr_line(line) + }; + if let Err(e) = callback_result { + callback_error = Some(e); + } + } + } + if !capture_output { + data.drain(..idx); + } + })?; + child.wait() + })() + .chain_err(|| process_error(&format!("could not execute process {}", self), None, None))?; + let output = Output { + stdout, + stderr, + status, + }; + + { + let to_print = if capture_output { Some(&output) } else { None }; + if let Some(e) = callback_error { + let cx = process_error( + &format!("failed to parse process output: {}", self), + Some(output.status), + to_print, + ); + return Err(cx.context(e).into()); + } else if !output.status.success() { + return Err(process_error( + &format!("process didn't exit successfully: {}", self), + Some(output.status), + to_print, + ) + .into()); + } + } + + Ok(output) + } + + /// Converts `ProcessBuilder` into a `std::process::Command`, and handles the jobserver, if + /// present. pub fn build_command(&self) -> Command { let mut command = Command::new(&self.program); - command.current_dir(&self.cwd); - for arg in self.args.iter() { + if let Some(cwd) = self.get_cwd() { + command.current_dir(cwd); + } + for arg in &self.args { command.arg(arg); } - for (k, v) in self.env.iter() { + for (k, v) in &self.env { match *v { - Some(ref v) => { command.env(k, v); } - None => { command.env_remove(k); } + Some(ref v) => { + command.env(k, v); + } + None => { + command.env_remove(k); + } } } - command - } - - fn debug_string(&self) -> String { - let mut program = format!("{}", self.program.to_string_lossy()); - for arg in self.args.iter() { - program.push(' '); - program.push_str(&format!("{}", arg.to_string_lossy())); + if let Some(ref c) = self.jobserver { + c.configure(&mut command); } - program + command } } -pub fn process>(cmd: T) -> CargoResult { - Ok(ProcessBuilder { +/// A helper function to create a `ProcessBuilder`. 
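For a feel of the streaming callbacks, here is a much-simplified stdout-only sketch using `BufReader`. The real implementation needs `read2` to multiplex stdout and stderr concurrently: draining two pipes one after the other can deadlock once the unread pipe's buffer fills.

```rust
use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};

fn stream_stdout(mut on_line: impl FnMut(&str)) -> std::io::Result<()> {
    let mut child = Command::new("rustc")
        .arg("--version")
        .stdout(Stdio::piped())
        .spawn()?;
    // Fine for a single pipe; see the caveat above for two.
    let reader = BufReader::new(child.stdout.take().unwrap());
    for line in reader.lines() {
        on_line(&line?);
    }
    child.wait()?;
    Ok(())
}
```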
+pub fn process>(cmd: T) -> ProcessBuilder { + ProcessBuilder { program: cmd.as_ref().to_os_string(), args: Vec::new(), - cwd: try!(env::current_dir()).as_os_str().to_os_string(), + cwd: None, env: HashMap::new(), - }) + jobserver: None, + display_env_vars: false, + } +} + +#[cfg(unix)] +mod imp { + use crate::util::{process_error, ProcessBuilder}; + use crate::CargoResult; + use std::os::unix::process::CommandExt; + + pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> { + let mut command = process_builder.build_command(); + let error = command.exec(); + Err(failure::Error::from(error) + .context(process_error( + &format!("could not execute process {}", process_builder), + None, + None, + )) + .into()) + } +} + +#[cfg(windows)] +mod imp { + use crate::util::{process_error, ProcessBuilder}; + use crate::CargoResult; + use winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE}; + use winapi::um::consoleapi::SetConsoleCtrlHandler; + + unsafe extern "system" fn ctrlc_handler(_: DWORD) -> BOOL { + // Do nothing; let the child process handle it. + TRUE + } + + pub fn exec_replace(process_builder: &ProcessBuilder) -> CargoResult<()> { + unsafe { + if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE { + return Err(process_error("Could not set Ctrl-C handler.", None, None).into()); + } + } + + // Just execute the process as normal. + process_builder.exec() + } } diff --git a/src/cargo/util/profile.rs b/src/cargo/util/profile.rs index 59cc83f45dc..b450d12058e 100644 --- a/src/cargo/util/profile.rs +++ b/src/cargo/util/profile.rs @@ -1,11 +1,12 @@ +use std::cell::RefCell; use std::env; use std::fmt; -use std::mem; -use time; +use std::io::{stdout, StdoutLock, Write}; use std::iter::repeat; -use std::cell::RefCell; +use std::mem; +use std::time; -thread_local!(static PROFILE_STACK: RefCell> = RefCell::new(Vec::new())); +thread_local!(static PROFILE_STACK: RefCell> = RefCell::new(Vec::new())); thread_local!(static MESSAGES: RefCell> = RefCell::new(Vec::new())); type Message = (usize, u64, String); @@ -19,9 +20,13 @@ fn enabled_level() -> Option { } pub fn start(desc: T) -> Profiler { - if enabled_level().is_none() { return Profiler { desc: String::new() } } + if enabled_level().is_none() { + return Profiler { + desc: String::new(), + }; + } - PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::precise_time_ns())); + PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now())); Profiler { desc: desc.to_string(), @@ -35,35 +40,49 @@ impl Drop for Profiler { None => return, }; - let start = PROFILE_STACK.with(|stack| stack.borrow_mut().pop().unwrap()); - let end = time::precise_time_ns(); + let (start, stack_len) = PROFILE_STACK.with(|stack| { + let mut stack = stack.borrow_mut(); + let start = stack.pop().unwrap(); + (start, stack.len()) + }); + let duration = start.elapsed(); + let duration_ms = duration.as_secs() * 1000 + u64::from(duration.subsec_millis()); + + let msg = ( + stack_len, + duration_ms, + mem::replace(&mut self.desc, String::new()), + ); + MESSAGES.with(|msgs| msgs.borrow_mut().push(msg)); - let stack_len = PROFILE_STACK.with(|stack| stack.borrow().len()); if stack_len == 0 { - fn print(lvl: usize, msgs: &[Message], enabled: usize) { - if lvl > enabled { return } + fn print(lvl: usize, msgs: &[Message], enabled: usize, stdout: &mut StdoutLock<'_>) { + if lvl > enabled { + return; + } let mut last = 0; for (i, &(l, time, ref msg)) in msgs.iter().enumerate() { - if l != lvl { continue } - println!("{} {:6}ms - {}", - repeat(" ").take(lvl + 
1).collect::(), - time / 1000000, msg); + if l != lvl { + continue; + } + writeln!( + stdout, + "{} {:6}ms - {}", + repeat(" ").take(lvl + 1).collect::(), + time, + msg + ) + .expect("printing profiling info to stdout"); - print(lvl + 1, &msgs[last..i], enabled); + print(lvl + 1, &msgs[last..i], enabled, stdout); last = i; } - } - MESSAGES.with(|msgs_rc| { - let mut msgs = msgs_rc.borrow_mut(); - msgs.push((0, end - start, - mem::replace(&mut self.desc, String::new()))); - print(0, &msgs, enabled); - }); - } else { + let stdout = stdout(); MESSAGES.with(|msgs| { - let msg = mem::replace(&mut self.desc, String::new()); - msgs.borrow_mut().push((stack_len, end - start, msg)); + let mut msgs = msgs.borrow_mut(); + print(0, &msgs, enabled, &mut stdout.lock()); + msgs.clear(); }); } } diff --git a/src/cargo/util/progress.rs b/src/cargo/util/progress.rs new file mode 100644 index 00000000000..42a6f162f6b --- /dev/null +++ b/src/cargo/util/progress.rs @@ -0,0 +1,420 @@ +use std::cmp; +use std::env; +use std::time::{Duration, Instant}; + +use crate::core::shell::Verbosity; +use crate::util::{is_ci, CargoResult, Config}; + +use unicode_width::UnicodeWidthChar; + +pub struct Progress<'cfg> { + state: Option>, +} + +pub enum ProgressStyle { + Percentage, + Ratio, +} + +struct Throttle { + first: bool, + last_update: Instant, +} + +struct State<'cfg> { + config: &'cfg Config, + format: Format, + name: String, + done: bool, + throttle: Throttle, + last_line: Option, +} + +struct Format { + style: ProgressStyle, + max_width: usize, + max_print: usize, +} + +impl<'cfg> Progress<'cfg> { + pub fn with_style(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> { + // report no progress when -q (for quiet) or TERM=dumb are set + // or if running on Continuous Integration service like Travis where the + // output logs get mangled. + let dumb = match env::var("TERM") { + Ok(term) => term == "dumb", + Err(_) => false, + }; + if cfg.shell().verbosity() == Verbosity::Quiet || dumb || is_ci() { + return Progress { state: None }; + } + + Progress { + state: cfg.shell().err_width().map(|n| State { + config: cfg, + format: Format { + style, + max_width: n, + max_print: 80, + }, + name: name.to_string(), + done: false, + throttle: Throttle::new(), + last_line: None, + }), + } + } + + pub fn disable(&mut self) { + self.state = None; + } + + pub fn is_enabled(&self) -> bool { + self.state.is_some() + } + + pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> { + Self::with_style(name, ProgressStyle::Percentage, cfg) + } + + pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> { + let s = match &mut self.state { + Some(s) => s, + None => return Ok(()), + }; + + // Don't update too often as it can cause excessive performance loss + // just putting stuff onto the terminal. We also want to avoid + // flickering by not drawing anything that goes away too quickly. As a + // result we've got two branches here: + // + // 1. If we haven't drawn anything, we wait for a period of time to + // actually start drawing to the console. This ensures that + // short-lived operations don't flicker on the console. Currently + // there's a 500ms delay to when we first draw something. + // 2. If we've drawn something, then we rate limit ourselves to only + // draw to the console every so often. Currently there's a 100ms + // delay between updates. 
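The switch from `time::precise_time_ns` to `std::time::Instant` is the noteworthy change in `profile.rs`. At its core the profiler is an RAII timer, sketched standalone below (minus the thread-local nesting stack):

```rust
use std::time::Instant;

struct Timer {
    desc: &'static str,
    start: Instant,
}

impl Drop for Timer {
    fn drop(&mut self) {
        // `Instant` is monotonic, unlike wall-clock time, so the elapsed
        // value can't go backwards across clock adjustments.
        println!("{:6}ms - {}", self.start.elapsed().as_millis(), self.desc);
    }
}

fn main() {
    let _t = Timer { desc: "resolving dependencies", start: Instant::now() };
    // ... measured work ...
}
```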
+ if !s.throttle.allowed() { + return Ok(()); + } + + s.tick(cur, max, "") + } + + pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { + match self.state { + Some(ref mut s) => s.tick(cur, max, msg), + None => Ok(()), + } + } + + pub fn update_allowed(&mut self) -> bool { + match &mut self.state { + Some(s) => s.throttle.allowed(), + None => false, + } + } + + pub fn print_now(&mut self, msg: &str) -> CargoResult<()> { + match &mut self.state { + Some(s) => s.print("", msg), + None => Ok(()), + } + } + + pub fn clear(&mut self) { + if let Some(ref mut s) = self.state { + s.clear(); + } + } +} + +impl Throttle { + fn new() -> Throttle { + Throttle { + first: true, + last_update: Instant::now(), + } + } + + fn allowed(&mut self) -> bool { + if self.first { + let delay = Duration::from_millis(500); + if self.last_update.elapsed() < delay { + return false; + } + } else { + let interval = Duration::from_millis(100); + if self.last_update.elapsed() < interval { + return false; + } + } + self.update(); + true + } + + fn update(&mut self) { + self.first = false; + self.last_update = Instant::now(); + } +} + +impl<'cfg> State<'cfg> { + fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { + if self.done { + return Ok(()); + } + + if max > 0 && cur == max { + self.done = true; + } + + // Write out a pretty header, then the progress bar itself, and then + // return back to the beginning of the line for the next print. + self.try_update_max_width(); + if let Some(pbar) = self.format.progress(cur, max) { + self.print(&pbar, msg)?; + } + Ok(()) + } + + fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> { + self.throttle.update(); + self.try_update_max_width(); + + // make sure we have enough room for the header + if self.format.max_width < 15 { + return Ok(()); + } + + let mut line = prefix.to_string(); + self.format.render(&mut line, msg); + while line.len() < self.format.max_width - 15 { + line.push(' '); + } + + // Only update if the line has changed. + if self.config.shell().is_cleared() || self.last_line.as_ref() != Some(&line) { + let mut shell = self.config.shell(); + shell.set_needs_clear(false); + shell.status_header(&self.name)?; + write!(shell.err(), "{}\r", line)?; + self.last_line = Some(line); + shell.set_needs_clear(true); + } + + Ok(()) + } + + fn clear(&mut self) { + // No need to clear if the progress is not currently being displayed. 
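Two details in `State::print` carry the UX: a trailing `\r` so the next draw overwrites in place, and skipping the write when the rendered line is unchanged to avoid flicker. A stderr sketch of the same idea:

```rust
use std::io::{self, Write};

fn redraw(last: &mut Option<String>, line: String) -> io::Result<()> {
    if last.as_ref() != Some(&line) {
        // `\r` instead of `\n`: the cursor returns to column 0 and the
        // next draw overwrites this one.
        write!(io::stderr(), "{}\r", line)?;
        io::stderr().flush()?;
        *last = Some(line);
    }
    Ok(())
}
```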
+ if self.last_line.is_some() && !self.config.shell().is_cleared() { + self.config.shell().err_erase_line(); + self.last_line = None; + } + } + + fn try_update_max_width(&mut self) { + if let Some(n) = self.config.shell().err_width() { + self.format.max_width = n; + } + } +} + +impl Format { + fn progress(&self, cur: usize, max: usize) -> Option { + // Render the percentage at the far right and then figure how long the + // progress bar is + let pct = (cur as f64) / (max as f64); + let pct = if !pct.is_finite() { 0.0 } else { pct }; + let stats = match self.style { + ProgressStyle::Percentage => format!(" {:6.02}%", pct * 100.0), + ProgressStyle::Ratio => format!(" {}/{}", cur, max), + }; + let extra_len = stats.len() + 2 /* [ and ] */ + 15 /* status header */; + let display_width = match self.width().checked_sub(extra_len) { + Some(n) => n, + None => return None, + }; + + let mut string = String::with_capacity(self.max_width); + string.push('['); + let hashes = display_width as f64 * pct; + let hashes = hashes as usize; + + // Draw the `===>` + if hashes > 0 { + for _ in 0..hashes - 1 { + string.push_str("="); + } + if cur == max { + string.push_str("="); + } else { + string.push_str(">"); + } + } + + // Draw the empty space we have left to do + for _ in 0..(display_width - hashes) { + string.push_str(" "); + } + string.push_str("]"); + string.push_str(&stats); + + Some(string) + } + + fn render(&self, string: &mut String, msg: &str) { + let mut avail_msg_len = self.max_width - string.len() - 15; + let mut ellipsis_pos = 0; + if avail_msg_len <= 3 { + return; + } + for c in msg.chars() { + let display_width = c.width().unwrap_or(0); + if avail_msg_len >= display_width { + avail_msg_len -= display_width; + string.push(c); + if avail_msg_len >= 3 { + ellipsis_pos = string.len(); + } + } else { + string.truncate(ellipsis_pos); + string.push_str("..."); + break; + } + } + } + + #[cfg(test)] + fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option { + let mut ret = self.progress(cur, max)?; + self.render(&mut ret, msg); + Some(ret) + } + + fn width(&self) -> usize { + cmp::min(self.max_width, self.max_print) + } +} + +impl<'cfg> Drop for State<'cfg> { + fn drop(&mut self) { + self.clear(); + } +} + +#[test] +fn test_progress_status() { + let format = Format { + style: ProgressStyle::Ratio, + max_print: 40, + max_width: 60, + }; + assert_eq!( + format.progress_status(0, 4, ""), + Some("[ ] 0/4".to_string()) + ); + assert_eq!( + format.progress_status(1, 4, ""), + Some("[===> ] 1/4".to_string()) + ); + assert_eq!( + format.progress_status(2, 4, ""), + Some("[========> ] 2/4".to_string()) + ); + assert_eq!( + format.progress_status(3, 4, ""), + Some("[=============> ] 3/4".to_string()) + ); + assert_eq!( + format.progress_status(4, 4, ""), + Some("[===================] 4/4".to_string()) + ); + + assert_eq!( + format.progress_status(3999, 4000, ""), + Some("[===========> ] 3999/4000".to_string()) + ); + assert_eq!( + format.progress_status(4000, 4000, ""), + Some("[=============] 4000/4000".to_string()) + ); + + assert_eq!( + format.progress_status(3, 4, ": short message"), + Some("[=============> ] 3/4: short message".to_string()) + ); + assert_eq!( + format.progress_status(3, 4, ": msg thats just fit"), + Some("[=============> ] 3/4: msg thats just fit".to_string()) + ); + assert_eq!( + format.progress_status(3, 4, ": msg that's just fit"), + Some("[=============> ] 3/4: msg that's just...".to_string()) + ); + + // combining diacritics have width zero and thus can fit max_width. 
+ let zalgo_msg = + "z̸̧̢̗͉̝̦͍̱ͧͦͨ̑̅̌ͥ́͢a̢ͬͨ̽ͯ̅̑ͥ͋̏̑ͫ̄͢͏̫̝̪̤͎̱̣͍̭̞̙̱͙͍̘̭͚l̶̡̛̥̝̰̭̹̯̯̞̪͇̱̦͙͔̘̼͇͓̈ͨ͗ͧ̓͒ͦ̀̇ͣ̈ͭ͊͛̃̑͒̿̕͜g̸̷̢̩̻̻͚̠͓̞̥͐ͩ͌̑ͥ̊̽͋͐̐͌͛̐̇̑ͨ́ͅo͙̳̣͔̰̠̜͕͕̞̦̙̭̜̯̹̬̻̓͑ͦ͋̈̉͌̃ͯ̀̂͠ͅ ̸̡͎̦̲̖̤̺̜̮̱̰̥͔̯̅̏ͬ̂ͨ̋̃̽̈́̾̔̇ͣ̚͜͜h̡ͫ̐̅̿̍̀͜҉̛͇̭̹̰̠͙̞ẽ̶̙̹̳̖͉͎̦͂̋̓ͮ̔ͬ̐̀͂̌͑̒͆̚͜͠ ͓͓̟͍̮̬̝̝̰͓͎̼̻ͦ͐̾̔͒̃̓͟͟c̮̦͍̺͈͚̯͕̄̒͐̂͊̊͗͊ͤͣ̀͘̕͝͞o̶͍͚͍̣̮͌ͦ̽̑ͩ̅ͮ̐̽̏͗́͂̅ͪ͠m̷̧͖̻͔̥̪̭͉͉̤̻͖̩̤͖̘ͦ̂͌̆̂ͦ̒͊ͯͬ͊̉̌ͬ͝͡e̵̹̣͍̜̺̤̤̯̫̹̠̮͎͙̯͚̰̼͗͐̀̒͂̉̀̚͝͞s̵̲͍͙͖̪͓͓̺̱̭̩̣͖̣ͤͤ͂̎̈͗͆ͨͪ̆̈͗͝͠"; + assert_eq!( + format.progress_status(3, 4, zalgo_msg), + Some("[=============> ] 3/4".to_string() + zalgo_msg) + ); + + // some non-ASCII ellipsize test + assert_eq!( + format.progress_status(3, 4, "_123456789123456e\u{301}\u{301}8\u{301}90a"), + Some("[=============> ] 3/4_123456789123456e\u{301}\u{301}...".to_string()) + ); + assert_eq!( + format.progress_status(3, 4, ":每個漢字佔據了兩個字元"), + Some("[=============> ] 3/4:每個漢字佔據了...".to_string()) + ); +} + +#[test] +fn test_progress_status_percentage() { + let format = Format { + style: ProgressStyle::Percentage, + max_print: 40, + max_width: 60, + }; + assert_eq!( + format.progress_status(0, 77, ""), + Some("[ ] 0.00%".to_string()) + ); + assert_eq!( + format.progress_status(1, 77, ""), + Some("[ ] 1.30%".to_string()) + ); + assert_eq!( + format.progress_status(76, 77, ""), + Some("[=============> ] 98.70%".to_string()) + ); + assert_eq!( + format.progress_status(77, 77, ""), + Some("[===============] 100.00%".to_string()) + ); +} + +#[test] +fn test_progress_status_too_short() { + let format = Format { + style: ProgressStyle::Percentage, + max_print: 25, + max_width: 25, + }; + assert_eq!( + format.progress_status(1, 1, ""), + Some("[] 100.00%".to_string()) + ); + + let format = Format { + style: ProgressStyle::Percentage, + max_print: 24, + max_width: 24, + }; + assert_eq!(format.progress_status(1, 1, ""), None); +} diff --git a/src/cargo/util/read2.rs b/src/cargo/util/read2.rs new file mode 100644 index 00000000000..bfa2427976d --- /dev/null +++ b/src/cargo/util/read2.rs @@ -0,0 +1,179 @@ +pub use self::imp::read2; + +#[cfg(unix)] +mod imp { + use libc; + use std::io; + use std::io::prelude::*; + use std::mem; + use std::os::unix::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + + pub fn read2( + mut out_pipe: ChildStdout, + mut err_pipe: ChildStderr, + data: &mut dyn FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + unsafe { + libc::fcntl(out_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + libc::fcntl(err_pipe.as_raw_fd(), libc::F_SETFL, libc::O_NONBLOCK); + } + + let mut out_done = false; + let mut err_done = false; + let mut out = Vec::new(); + let mut err = Vec::new(); + + let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; + fds[0].fd = out_pipe.as_raw_fd(); + fds[0].events = libc::POLLIN; + fds[1].fd = err_pipe.as_raw_fd(); + fds[1].events = libc::POLLIN; + let mut nfds = 2; + let mut errfd = 1; + + while nfds > 0 { + // wait for either pipe to become readable using `select` + let r = unsafe { libc::poll(fds.as_mut_ptr(), nfds, -1) }; + if r == -1 { + let err = io::Error::last_os_error(); + if err.kind() == io::ErrorKind::Interrupted { + continue; + } + return Err(err); + } + + // Read as much as we can from each pipe, ignoring EWOULDBLOCK or + // EAGAIN. If we hit EOF, then this will happen because the underlying + // reader will return Ok(0), in which case we'll see `Ok` ourselves. In + // this case we flip the other fd back into blocking mode and read + // whatever's leftover on that file descriptor. 
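+            //
+            // Mechanics of the block below: `handle` maps `WouldBlock` to
+            // "not done yet" and propagates any other error, while a
+            // successful `read_to_end` means that pipe hit EOF. When stdout
+            // finishes first, its pollfd slot is reused for stderr
+            // (`fds[0].fd` is repointed and `errfd` becomes 0), so the one
+            // descriptor still being watched stays at index 0 as `nfds`
+            // drops from 2 to 1.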
+ let handle = |res: io::Result<_>| match res { + Ok(_) => Ok(true), + Err(e) => { + if e.kind() == io::ErrorKind::WouldBlock { + Ok(false) + } else { + Err(e) + } + } + }; + if !err_done && fds[errfd].revents != 0 && handle(err_pipe.read_to_end(&mut err))? { + err_done = true; + nfds -= 1; + } + data(false, &mut err, err_done); + if !out_done && fds[0].revents != 0 && handle(out_pipe.read_to_end(&mut out))? { + out_done = true; + fds[0].fd = err_pipe.as_raw_fd(); + errfd = 0; + nfds -= 1; + } + data(true, &mut out, out_done); + } + Ok(()) + } +} + +#[cfg(windows)] +mod imp { + use std::io; + use std::os::windows::prelude::*; + use std::process::{ChildStderr, ChildStdout}; + use std::slice; + + use miow::iocp::{CompletionPort, CompletionStatus}; + use miow::pipe::NamedPipe; + use miow::Overlapped; + use winapi::shared::winerror::ERROR_BROKEN_PIPE; + + struct Pipe<'a> { + dst: &'a mut Vec, + overlapped: Overlapped, + pipe: NamedPipe, + done: bool, + } + + pub fn read2( + out_pipe: ChildStdout, + err_pipe: ChildStderr, + data: &mut dyn FnMut(bool, &mut Vec, bool), + ) -> io::Result<()> { + let mut out = Vec::new(); + let mut err = Vec::new(); + + let port = CompletionPort::new(1)?; + port.add_handle(0, &out_pipe)?; + port.add_handle(1, &err_pipe)?; + + unsafe { + let mut out_pipe = Pipe::new(out_pipe, &mut out); + let mut err_pipe = Pipe::new(err_pipe, &mut err); + + out_pipe.read()?; + err_pipe.read()?; + + let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; + + while !out_pipe.done || !err_pipe.done { + for status in port.get_many(&mut status, None)? { + if status.token() == 0 { + out_pipe.complete(status); + data(true, out_pipe.dst, out_pipe.done); + out_pipe.read()?; + } else { + err_pipe.complete(status); + data(false, err_pipe.dst, err_pipe.done); + err_pipe.read()?; + } + } + } + + Ok(()) + } + } + + impl<'a> Pipe<'a> { + unsafe fn new(p: P, dst: &'a mut Vec) -> Pipe<'a> { + Pipe { + dst, + pipe: NamedPipe::from_raw_handle(p.into_raw_handle()), + overlapped: Overlapped::zero(), + done: false, + } + } + + unsafe fn read(&mut self) -> io::Result<()> { + let dst = slice_to_end(self.dst); + match self.pipe.read_overlapped(dst, self.overlapped.raw()) { + Ok(_) => Ok(()), + Err(e) => { + if e.raw_os_error() == Some(ERROR_BROKEN_PIPE as i32) { + self.done = true; + Ok(()) + } else { + Err(e) + } + } + } + } + + unsafe fn complete(&mut self, status: &CompletionStatus) { + let prev = self.dst.len(); + self.dst.set_len(prev + status.bytes_transferred() as usize); + if status.bytes_transferred() == 0 { + self.done = true; + } + } + } + + unsafe fn slice_to_end(v: &mut Vec) -> &mut [u8] { + if v.capacity() == 0 { + v.reserve(16); + } + if v.capacity() == v.len() { + v.reserve(1); + } + slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) + } +} diff --git a/src/cargo/util/rustc.rs b/src/cargo/util/rustc.rs index 0a3fbf69b03..da47787cd3e 100644 --- a/src/cargo/util/rustc.rs +++ b/src/cargo/util/rustc.rs @@ -1,50 +1,261 @@ -use std::path::Path; +#![allow(deprecated)] // for SipHasher -use util::{self, CargoResult, internal, ChainError}; +use std::collections::hash_map::{Entry, HashMap}; +use std::env; +use std::hash::{Hash, Hasher, SipHasher}; +use std::path::{Path, PathBuf}; +use std::sync::Mutex; +use log::{debug, info, warn}; +use serde::{Deserialize, Serialize}; + +use crate::util::paths; +use crate::util::{self, internal, profile, CargoResult, ProcessBuilder}; + +/// Information on the `rustc` executable +#[derive(Debug)] pub struct Rustc { + /// 
The location of the exe + pub path: PathBuf, + /// An optional program that will be passed the path of the rust exe as its first argument, and + /// rustc args following this. + pub wrapper: Option, + /// Verbose version information (the output of `rustc -vV`) pub verbose_version: String, + /// The host triple (arch-platform-OS), this comes from verbose_version. pub host: String, - pub cap_lints: bool, + cache: Mutex, } impl Rustc { - /// Run the compiler at `path` to learn varioues pieces of information about - /// it. + /// Runs the compiler at `path` to learn various pieces of information about + /// it, with an optional wrapper. /// /// If successful this function returns a description of the compiler along /// with a list of its capabilities. - pub fn new>(path: P) -> CargoResult { - let mut cmd = try!(util::process(path.as_ref())); + pub fn new( + path: PathBuf, + wrapper: Option, + rustup_rustc: &Path, + cache_location: Option, + ) -> CargoResult { + let _p = profile::start("Rustc::new"); + + let mut cache = Cache::load(&path, rustup_rustc, cache_location); + + let mut cmd = util::process(&path); cmd.arg("-vV"); + let verbose_version = cache.cached_output(&cmd)?.0; - let mut ret = Rustc::blank(); - let mut first = cmd.clone(); - first.arg("--cap-lints").arg("allow"); - let output = match first.exec_with_output() { - Ok(output) => { ret.cap_lints = true; output } - Err(..) => try!(cmd.exec_with_output()), - }; - ret.verbose_version = try!(String::from_utf8(output.stdout).map_err(|_| { - internal("rustc -v didn't return utf8 output") - })); - ret.host = { - let triple = ret.verbose_version.lines().filter(|l| { - l.starts_with("host: ") - }).map(|l| &l[6..]).next(); - let triple = try!(triple.chain_error(|| { - internal("rustc -v didn't have a line for `host:`") - })); + let host = { + let triple = verbose_version + .lines() + .find(|l| l.starts_with("host: ")) + .map(|l| &l[6..]) + .ok_or_else(|| internal("rustc -v didn't have a line for `host:`"))?; triple.to_string() }; - Ok(ret) + + Ok(Rustc { + path, + wrapper: wrapper.map(util::process), + verbose_version, + host, + cache: Mutex::new(cache), + }) } - pub fn blank() -> Rustc { - Rustc { - verbose_version: String::new(), - host: String::new(), - cap_lints: false, + /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. + pub fn process_with(&self, path: impl AsRef) -> ProcessBuilder { + match self.wrapper { + Some(ref wrapper) if !wrapper.get_program().is_empty() => { + let mut cmd = wrapper.clone(); + cmd.arg(path.as_ref()); + cmd + } + _ => util::process(path.as_ref()), } } + + /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. + pub fn process(&self) -> ProcessBuilder { + self.process_with(&self.path) + } + + pub fn process_no_wrapper(&self) -> ProcessBuilder { + util::process(&self.path) + } + + pub fn cached_output(&self, cmd: &ProcessBuilder) -> CargoResult<(String, String)> { + self.cache.lock().unwrap().cached_output(cmd) + } + + pub fn set_wrapper(&mut self, wrapper: ProcessBuilder) { + self.wrapper = Some(wrapper); + } +} + +/// It is a well known fact that `rustc` is not the fastest compiler in the +/// world. What is less known is that even `rustc --version --verbose` takes +/// about a hundred milliseconds! Because we need compiler version info even +/// for no-op builds, we cache it here, based on compiler's mtime and rustup's +/// current toolchain. 
+/// +/// https://github.com/rust-lang/cargo/issues/5315 +/// https://github.com/rust-lang/rust/issues/49761 +#[derive(Debug)] +struct Cache { + cache_location: Option, + dirty: bool, + data: CacheData, +} + +#[derive(Serialize, Deserialize, Debug, Default)] +struct CacheData { + rustc_fingerprint: u64, + outputs: HashMap, + successes: HashMap, +} + +impl Cache { + fn load(rustc: &Path, rustup_rustc: &Path, cache_location: Option) -> Cache { + match (cache_location, rustc_fingerprint(rustc, rustup_rustc)) { + (Some(cache_location), Ok(rustc_fingerprint)) => { + let empty = CacheData { + rustc_fingerprint, + outputs: HashMap::new(), + successes: HashMap::new(), + }; + let mut dirty = true; + let data = match read(&cache_location) { + Ok(data) => { + if data.rustc_fingerprint == rustc_fingerprint { + debug!("reusing existing rustc info cache"); + dirty = false; + data + } else { + debug!("different compiler, creating new rustc info cache"); + empty + } + } + Err(e) => { + debug!("failed to read rustc info cache: {}", e); + empty + } + }; + return Cache { + cache_location: Some(cache_location), + dirty, + data, + }; + + fn read(path: &Path) -> CargoResult { + let json = paths::read(path)?; + Ok(serde_json::from_str(&json)?) + } + } + (_, fingerprint) => { + if let Err(e) = fingerprint { + warn!("failed to calculate rustc fingerprint: {}", e); + } + debug!("rustc info cache disabled"); + Cache { + cache_location: None, + dirty: false, + data: CacheData::default(), + } + } + } + } + + fn cached_output(&mut self, cmd: &ProcessBuilder) -> CargoResult<(String, String)> { + let key = process_fingerprint(cmd); + match self.data.outputs.entry(key) { + Entry::Occupied(entry) => { + debug!("rustc info cache hit"); + Ok(entry.get().clone()) + } + Entry::Vacant(entry) => { + debug!("rustc info cache miss"); + debug!("running {}", cmd); + let output = cmd.exec_with_output()?; + let stdout = String::from_utf8(output.stdout) + .map_err(|_| internal("rustc didn't return utf8 output"))?; + let stderr = String::from_utf8(output.stderr) + .map_err(|_| internal("rustc didn't return utf8 output"))?; + let output = (stdout, stderr); + entry.insert(output.clone()); + self.dirty = true; + Ok(output) + } + } + } +} + +impl Drop for Cache { + fn drop(&mut self) { + if !self.dirty { + return; + } + if let Some(ref path) = self.cache_location { + let json = serde_json::to_string(&self.data).unwrap(); + match paths::write(path, json.as_bytes()) { + Ok(()) => info!("updated rustc info cache"), + Err(e) => warn!("failed to update rustc info cache: {}", e), + } + } + } +} + +fn rustc_fingerprint(path: &Path, rustup_rustc: &Path) -> CargoResult { + let mut hasher = SipHasher::new_with_keys(0, 0); + + let path = paths::resolve_executable(path)?; + path.hash(&mut hasher); + + paths::mtime(&path)?.hash(&mut hasher); + + // Rustup can change the effective compiler without touching + // the `rustc` binary, so we try to account for this here. + // If we see rustup's env vars, we mix them into the fingerprint, + // but we also mix in the mtime of the actual compiler (and not + // the rustup shim at `~/.cargo/bin/rustup`), because `RUSTUP_TOOLCHAIN` + // could be just `stable-x86_64-unknown-linux-gnu`, i.e, it could + // not mention the version of Rust at all, which changes after + // `rustup update`. + // + // If we don't see rustup env vars, but it looks like the compiler + // is managed by rustup, we conservatively bail out. 
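+    //
+    // For example (hypothetical values): with RUSTUP_HOME=~/.rustup and
+    // RUSTUP_TOOLCHAIN=stable-x86_64-unknown-linux-gnu, the fingerprint
+    // below also hashes the mtime of
+    // ~/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/bin/rustc,
+    // which does change after `rustup update`, even though neither the
+    // env vars nor the shim binary's mtime do.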
+ let maybe_rustup = rustup_rustc == path; + match ( + maybe_rustup, + env::var("RUSTUP_HOME"), + env::var("RUSTUP_TOOLCHAIN"), + ) { + (_, Ok(rustup_home), Ok(rustup_toolchain)) => { + debug!("adding rustup info to rustc fingerprint"); + rustup_toolchain.hash(&mut hasher); + rustup_home.hash(&mut hasher); + let real_rustc = Path::new(&rustup_home) + .join("toolchains") + .join(rustup_toolchain) + .join("bin") + .join("rustc") + .with_extension(env::consts::EXE_EXTENSION); + paths::mtime(&real_rustc)?.hash(&mut hasher); + } + (true, _, _) => failure::bail!("probably rustup rustc, but without rustup's env vars"), + _ => (), + } + + Ok(hasher.finish()) +} + +fn process_fingerprint(cmd: &ProcessBuilder) -> u64 { + let mut hasher = SipHasher::new_with_keys(0, 0); + cmd.get_args().hash(&mut hasher); + let mut env = cmd.get_envs().iter().collect::>(); + env.sort_unstable(); + env.hash(&mut hasher); + hasher.finish() } diff --git a/src/cargo/util/sha256.rs b/src/cargo/util/sha256.rs index 04e1974210f..b2cd8cab08a 100644 --- a/src/cargo/util/sha256.rs +++ b/src/cargo/util/sha256.rs @@ -1,136 +1,56 @@ -pub use self::imp::Sha256; - -// Someone upstream will link to OpenSSL, so we don't need to explicitly -// link to it ourselves. Hence we pick up Sha256 digests from OpenSSL -#[cfg(not(windows))] -#[allow(bad_style)] -mod imp { - use libc; - - enum EVP_MD_CTX {} - enum EVP_MD {} - enum ENGINE {} - - extern { - fn EVP_DigestInit_ex(ctx: *mut EVP_MD_CTX, - kind: *const EVP_MD, - imp: *mut ENGINE) -> libc::c_int; - fn EVP_DigestUpdate(ctx: *mut EVP_MD_CTX, - d: *const libc::c_void, - cnt: libc::size_t) -> libc::c_int; - fn EVP_DigestFinal_ex(ctx: *mut EVP_MD_CTX, md: *mut libc::c_uchar, - s: *mut libc::c_uint) -> libc::c_int; - fn EVP_MD_CTX_create() -> *mut EVP_MD_CTX; - fn EVP_MD_CTX_destroy(ctx: *mut EVP_MD_CTX); - fn EVP_sha256() -> *const EVP_MD; +use self::crypto_hash::{Algorithm, Hasher}; +use crate::util::{CargoResult, CargoResultExt}; +use crypto_hash; +use std::fs::File; +use std::io::{self, Read, Write}; +use std::path::Path; + +pub struct Sha256(Hasher); + +impl Sha256 { + pub fn new() -> Sha256 { + let hasher = Hasher::new(Algorithm::SHA256); + Sha256(hasher) } - pub struct Sha256 { ctx: *mut EVP_MD_CTX } - - impl Sha256 { - pub fn new() -> Sha256 { - unsafe { - let ctx = EVP_MD_CTX_create(); - assert!(!ctx.is_null()); - let ret = Sha256 { ctx: ctx }; - let n = EVP_DigestInit_ex(ret.ctx, EVP_sha256(), 0 as *mut _); - assert_eq!(n, 1); - return ret; - } - } - - pub fn update(&mut self, bytes: &[u8]) { - unsafe { - let n = EVP_DigestUpdate(self.ctx, bytes.as_ptr() as *const _, - bytes.len() as libc::size_t); - assert_eq!(n, 1); - } - } + pub fn update(&mut self, bytes: &[u8]) -> &mut Sha256 { + let _ = self.0.write_all(bytes); + self + } - pub fn finish(&mut self) -> [u8; 32] { - unsafe { - let mut ret = [0u8; 32]; - let mut out = 0; - let n = EVP_DigestFinal_ex(self.ctx, ret.as_mut_ptr(), &mut out); - assert_eq!(n, 1); - assert_eq!(out, 32); - return ret; + pub fn update_file(&mut self, mut file: &File) -> io::Result<&mut Sha256> { + let mut buf = [0; 64 * 1024]; + loop { + let n = file.read(&mut buf)?; + if n == 0 { + break Ok(self); } + self.update(&buf[..n]); } } - impl Drop for Sha256 { - fn drop(&mut self) { - unsafe { EVP_MD_CTX_destroy(self.ctx) } - } + pub fn update_path>(&mut self, path: P) -> CargoResult<&mut Sha256> { + let path = path.as_ref(); + let file = File::open(path)?; + self.update_file(&file) + .chain_err(|| format!("failed to read `{}`", path.display()))?; + Ok(self) } 
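+    //
+    // A minimal usage sketch (hypothetical file name; `finish_hex` is
+    // defined just below):
+    //
+    //     let mut sha = Sha256::new();
+    //     sha.update_path("Cargo.toml")?;     // streamed in 64 KiB chunks
+    //     let hex: String = sha.finish_hex(); // 32-byte digest, hex-encoded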
-} -// Leverage the crypto APIs that windows has built in. -#[cfg(windows)] -mod imp { - extern crate winapi; - extern crate advapi32; - use std::io; - use std::ptr; - - use self::winapi::{DWORD, HCRYPTPROV, HCRYPTHASH}; - use self::winapi::{PROV_RSA_AES, CRYPT_SILENT, CRYPT_VERIFYCONTEXT, CALG_SHA_256, HP_HASHVAL}; - use self::advapi32::{CryptAcquireContextW, CryptCreateHash, CryptDestroyHash}; - use self::advapi32::{CryptGetHashParam, CryptHashData, CryptReleaseContext}; - - macro_rules! call{ ($e:expr) => ({ - if $e == 0 { - panic!("failed {}: {}", stringify!($e), io::Error::last_os_error()) - } - }) } - - pub struct Sha256 { - hcryptprov: HCRYPTPROV, - hcrypthash: HCRYPTHASH, + pub fn finish(&mut self) -> [u8; 32] { + let mut ret = [0u8; 32]; + let data = self.0.finish(); + ret.copy_from_slice(&data[..]); + ret } - impl Sha256 { - pub fn new() -> Sha256 { - let mut hcp = 0; - call!(unsafe { - CryptAcquireContextW(&mut hcp, ptr::null(), ptr::null(), - PROV_RSA_AES, - CRYPT_VERIFYCONTEXT | CRYPT_SILENT) - }); - let mut ret = Sha256 { hcryptprov: hcp, hcrypthash: 0 }; - call!(unsafe { - CryptCreateHash(ret.hcryptprov, CALG_SHA_256, - 0, 0, &mut ret.hcrypthash) - }); - return ret; - } - - pub fn update(&mut self, bytes: &[u8]) { - call!(unsafe { - CryptHashData(self.hcrypthash, bytes.as_ptr() as *mut _, - bytes.len() as DWORD, 0) - }) - } - - pub fn finish(&mut self) -> [u8; 32] { - let mut ret = [0u8; 32]; - let mut len = ret.len() as DWORD; - call!(unsafe { - CryptGetHashParam(self.hcrypthash, HP_HASHVAL, ret.as_mut_ptr(), - &mut len, 0) - }); - assert_eq!(len as usize, ret.len()); - return ret; - } + pub fn finish_hex(&mut self) -> String { + hex::encode(self.finish()) } +} - impl Drop for Sha256 { - fn drop(&mut self) { - if self.hcrypthash != 0 { - call!(unsafe { CryptDestroyHash(self.hcrypthash) }); - } - call!(unsafe { CryptReleaseContext(self.hcryptprov, 0) }); - } +impl Default for Sha256 { + fn default() -> Self { + Self::new() } } diff --git a/src/cargo/util/shell_escape.rs b/src/cargo/util/shell_escape.rs deleted file mode 100644 index 2acbd00ea00..00000000000 --- a/src/cargo/util/shell_escape.rs +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -use std::borrow::Cow; -use std::env; - -pub fn escape(s: Cow) -> Cow { - if cfg!(unix) { - unix::escape(s) - } else if env::var("MSYSTEM").is_ok() { - unix::escape(s) - } else { - windows::escape(s) - } -} - -pub mod windows { - use std::borrow::Cow; - use std::iter::repeat; - - /// Escape for the windows cmd.exe shell, for more info see this url: - /// - /// http://blogs.msdn.com/b/twistylittlepassagesallalike/archive/2011/04/23 - /// /everyone-quotes-arguments-the-wrong-way.aspx - pub fn escape(s: Cow) -> Cow { - let mut needs_escape = false; - for ch in s.chars() { - match ch { - '"' | '\t' | '\n' | ' ' => needs_escape = true, - _ => {} - } - } - if !needs_escape { - return s - } - let mut es = String::with_capacity(s.len()); - es.push('"'); - let mut chars = s.chars().peekable(); - loop { - let mut nslashes = 0; - while let Some(&'\\') = chars.peek() { - chars.next(); - nslashes += 1; - } - - match chars.next() { - Some('"') => { - es.extend(repeat('\\').take(nslashes * 2 + 1)); - es.push('"'); - } - Some(c) => { - es.extend(repeat('\\').take(nslashes)); - es.push(c); - } - None => { - es.extend(repeat('\\').take(nslashes * 2)); - break; - } - } - - } - es.push('"'); - es.into() - } - - #[test] - fn test_escape() { - assert_eq!(escape("--aaa=bbb-ccc".into()), "--aaa=bbb-ccc"); - assert_eq!(escape("linker=gcc -L/foo -Wl,bar".into()), - r#""linker=gcc -L/foo -Wl,bar""#); - assert_eq!(escape(r#"--features="default""#.into()), - r#""--features=\"default\"""#); - assert_eq!(escape(r#"\path\to\my documents\"#.into()), - r#""\path\to\my documents\\""#); - } -} - -pub mod unix { - use std::borrow::Cow; - - const SHELL_SPECIAL: &'static str = r#" \$'"`!"#; - - /// Escape characters that may have special meaning in a shell, - /// including spaces. - pub fn escape(s: Cow) -> Cow { - let escape_char = '\\'; - // check if string needs to be escaped - let clean = SHELL_SPECIAL.chars().all(|sp_char| !s.contains(sp_char)); - if clean { - return s - } - let mut es = String::with_capacity(s.len()); - for ch in s.chars() { - if SHELL_SPECIAL.contains(ch) { - es.push(escape_char); - } - es.push(ch) - } - es.into() - } - - #[test] - fn test_escape() { - assert_eq!(escape("--aaa=bbb-ccc".into()), "--aaa=bbb-ccc"); - assert_eq!(escape("linker=gcc -L/foo -Wl,bar".into()), - r#"linker=gcc\ -L/foo\ -Wl,bar"#); - assert_eq!(escape(r#"--features="default""#.into()), - r#"--features=\"default\""#); - assert_eq!(escape(r#"'!\$`\\\n "#.into()), - r#"\'\!\\\$\`\\\\\\n\ "#); - } -} diff --git a/src/cargo/util/to_semver.rs b/src/cargo/util/to_semver.rs index ad6aff16e3e..65cc078fd29 100644 --- a/src/cargo/util/to_semver.rs +++ b/src/cargo/util/to_semver.rs @@ -1,30 +1,33 @@ +use crate::util::errors::CargoResult; use semver::Version; pub trait ToSemver { - fn to_semver(self) -> Result; + fn to_semver(self) -> CargoResult; } impl ToSemver for Version { - fn to_semver(self) -> Result { Ok(self) } + fn to_semver(self) -> CargoResult { + Ok(self) + } } impl<'a> ToSemver for &'a str { - fn to_semver(self) -> Result { + fn to_semver(self) -> CargoResult { match Version::parse(self) { Ok(v) => Ok(v), - Err(..) => Err(format!("cannot parse '{}' as a semver", self)), + Err(..) 
=> Err(failure::format_err!("cannot parse '{}' as a semver", self)), } } } impl<'a> ToSemver for &'a String { - fn to_semver(self) -> Result { + fn to_semver(self) -> CargoResult { (**self).to_semver() } } impl<'a> ToSemver for &'a Version { - fn to_semver(self) -> Result { + fn to_semver(self) -> CargoResult { Ok(self.clone()) } } diff --git a/src/cargo/util/to_url.rs b/src/cargo/util/to_url.rs deleted file mode 100644 index 2e3365cb255..00000000000 --- a/src/cargo/util/to_url.rs +++ /dev/null @@ -1,42 +0,0 @@ -use url::{self, Url, UrlParser}; -use std::path::Path; - -pub trait ToUrl { - fn to_url(self) -> Result; -} - -impl ToUrl for Url { - fn to_url(self) -> Result { - Ok(self) - } -} - -impl<'a> ToUrl for &'a Url { - fn to_url(self) -> Result { - Ok(self.clone()) - } -} - -impl<'a> ToUrl for &'a str { - fn to_url(self) -> Result { - UrlParser::new().scheme_type_mapper(mapper).parse(self).map_err(|s| { - format!("invalid url `{}`: {}", self, s) - }) - } -} - -impl<'a> ToUrl for &'a Path { - fn to_url(self) -> Result { - Url::from_file_path(self).map_err(|()| { - format!("invalid path url `{}`", self.display()) - }) - } -} - -fn mapper(s: &str) -> url::SchemeType { - match s { - "git" => url::SchemeType::Relative(9418), - "ssh" => url::SchemeType::Relative(22), - s => url::whatwg_scheme_type_mapper(s), - } -} diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs deleted file mode 100644 index 7e0115e0418..00000000000 --- a/src/cargo/util/toml.rs +++ /dev/null @@ -1,950 +0,0 @@ -use std::collections::HashMap; -use std::default::Default; -use std::fmt; -use std::fs; -use std::path::{Path, PathBuf}; -use std::str; - -use toml; -use semver; -use rustc_serialize::{Decodable, Decoder}; - -use core::{SourceId, Profiles}; -use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId, - GitReference}; -use core::dependency::Kind; -use core::manifest::{LibKind, Profile, ManifestMetadata}; -use core::package_id::Metadata; -use util::{self, CargoResult, human, ToUrl, ToSemver, ChainError, Config}; - -/// Representation of the projects file layout. -/// -/// This structure is used to hold references to all project files that are relevant to cargo. - -#[derive(Clone)] -pub struct Layout { - pub root: PathBuf, - lib: Option, - bins: Vec, - examples: Vec, - tests: Vec, - benches: Vec, -} - -impl Layout { - fn main(&self) -> Option<&PathBuf> { - self.bins.iter().find(|p| { - match p.file_name().and_then(|s| s.to_str()) { - Some(s) => s == "main.rs", - None => false - } - }) - } -} - -fn try_add_file(files: &mut Vec, file: PathBuf) { - if fs::metadata(&file).is_ok() { - files.push(file); - } -} -fn try_add_files(files: &mut Vec, root: PathBuf) { - match fs::read_dir(&root) { - Ok(new) => { - files.extend(new.filter_map(|dir| { - dir.map(|d| d.path()).ok() - }).filter(|f| { - f.extension().and_then(|s| s.to_str()) == Some("rs") - }).filter(|f| { - // Some unix editors may create "dotfiles" next to original - // source files while they're being edited, but these files are - // rarely actually valid Rust source files and sometimes aren't - // even valid UTF-8. Here we just ignore all of them and require - // that they are explicitly specified in Cargo.toml if desired. - f.file_name().and_then(|s| s.to_str()).map(|s| { - !s.starts_with(".") - }).unwrap_or(true) - })) - } - Err(_) => {/* just don't add anything if the directory doesn't exist, etc. */} - } -} - -/// Returns a new `Layout` for a given root path. 
-/// The `root_path` represents the directory that contains the `Cargo.toml` file. - -pub fn project_layout(root_path: &Path) -> Layout { - let mut lib = None; - let mut bins = vec!(); - let mut examples = vec!(); - let mut tests = vec!(); - let mut benches = vec!(); - - let lib_canidate = root_path.join("src").join("lib.rs"); - if fs::metadata(&lib_canidate).is_ok() { - lib = Some(lib_canidate); - } - - try_add_file(&mut bins, root_path.join("src").join("main.rs")); - try_add_files(&mut bins, root_path.join("src").join("bin")); - - try_add_files(&mut examples, root_path.join("examples")); - - try_add_files(&mut tests, root_path.join("tests")); - try_add_files(&mut benches, root_path.join("benches")); - - Layout { - root: root_path.to_path_buf(), - lib: lib, - bins: bins, - examples: examples, - tests: tests, - benches: benches, - } -} - -pub fn to_manifest(contents: &[u8], - source_id: &SourceId, - layout: Layout, - config: &Config) - -> CargoResult<(Manifest, Vec)> { - let manifest = layout.root.join("Cargo.toml"); - let manifest = match util::without_prefix(&manifest, config.cwd()) { - Some(path) => path.to_path_buf(), - None => manifest.clone(), - }; - let contents = try!(str::from_utf8(contents).map_err(|_| { - human(format!("{} is not valid UTF-8", manifest.display())) - })); - let root = try!(parse(contents, &manifest)); - let mut d = toml::Decoder::new(toml::Value::Table(root)); - let manifest: TomlManifest = try!(Decodable::decode(&mut d).map_err(|e| { - human(e.to_string()) - })); - - let pair = try!(manifest.to_manifest(source_id, &layout, config)); - let (mut manifest, paths) = pair; - match d.toml { - Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()), - None => {} - } - if !manifest.targets().iter().any(|t| !t.is_custom_build()) { - return Err(human(format!("no targets specified in the manifest\n either \ - src/lib.rs, src/main.rs, a [lib] section, or [[bin]] \ - section must be present"))) - } - return Ok((manifest, paths)); - - fn add_unused_keys(m: &mut Manifest, toml: &toml::Value, key: String) { - match *toml { - toml::Value::Table(ref table) => { - for (k, v) in table.iter() { - add_unused_keys(m, v, if key.len() == 0 { - k.clone() - } else { - key.clone() + "." 
+ k - }) - } - } - toml::Value::Array(ref arr) => { - for v in arr.iter() { - add_unused_keys(m, v, key.clone()); - } - } - _ => m.add_warning(format!("unused manifest key: {}", key)), - } - } -} - -pub fn parse(toml: &str, file: &Path) -> CargoResult { - let mut parser = toml::Parser::new(&toml); - match parser.parse() { - Some(toml) => return Ok(toml), - None => {} - } - let mut error_str = format!("could not parse input as TOML\n"); - for error in parser.errors.iter() { - let (loline, locol) = parser.to_linecol(error.lo); - let (hiline, hicol) = parser.to_linecol(error.hi); - error_str.push_str(&format!("{}:{}:{}{} {}\n", - file.display(), - loline + 1, locol + 1, - if loline != hiline || locol != hicol { - format!("-{}:{}", hiline + 1, - hicol + 1) - } else { - "".to_string() - }, - error.desc)); - } - Err(human(error_str)) -} - -type TomlLibTarget = TomlTarget; -type TomlBinTarget = TomlTarget; -type TomlExampleTarget = TomlTarget; -type TomlTestTarget = TomlTarget; -type TomlBenchTarget = TomlTarget; - -/* - * TODO: Make all struct fields private - */ - -#[derive(RustcDecodable)] -pub enum TomlDependency { - Simple(String), - Detailed(DetailedTomlDependency) -} - - -#[derive(RustcDecodable, Clone, Default)] -pub struct DetailedTomlDependency { - version: Option, - path: Option, - git: Option, - branch: Option, - tag: Option, - rev: Option, - features: Option>, - optional: Option, - default_features: Option, -} - -#[derive(RustcDecodable)] -pub struct TomlManifest { - package: Option>, - project: Option>, - profile: Option, - lib: Option, - bin: Option>, - example: Option>, - test: Option>, - bench: Option>, - dependencies: Option>, - dev_dependencies: Option>, - build_dependencies: Option>, - features: Option>>, - target: Option>, -} - -#[derive(RustcDecodable, Clone, Default)] -pub struct TomlProfiles { - test: Option, - doc: Option, - bench: Option, - dev: Option, - release: Option, -} - -#[derive(RustcDecodable, Clone, Default)] -pub struct TomlProfile { - opt_level: Option, - lto: Option, - codegen_units: Option, - debug: Option, - debug_assertions: Option, - rpath: Option, -} - -#[derive(RustcDecodable)] -pub struct TomlProject { - name: String, - version: TomlVersion, - authors: Vec, - build: Option, - links: Option, - exclude: Option>, - include: Option>, - - // package metadata - description: Option, - homepage: Option, - documentation: Option, - readme: Option, - keywords: Option>, - license: Option, - license_file: Option, - repository: Option, -} - -pub struct TomlVersion { - version: semver::Version, -} - -impl Decodable for TomlVersion { - fn decode(d: &mut D) -> Result { - let s = try!(d.read_str()); - match s.to_semver() { - Ok(s) => Ok(TomlVersion { version: s }), - Err(e) => Err(d.error(&e)), - } - } -} - -impl TomlProject { - pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult { - PackageId::new(&self.name, self.version.version.clone(), - source_id) - } -} - -struct Context<'a, 'b> { - deps: &'a mut Vec, - source_id: &'a SourceId, - nested_paths: &'a mut Vec, - config: &'b Config, -} - -// These functions produce the equivalent of specific manifest entries. One -// wrinkle is that certain paths cannot be represented in the manifest due -// to Toml's UTF-8 requirement. This could, in theory, mean that certain -// otherwise acceptable executable names are not used when inside of -// `src/bin/*`, but it seems ok to not build executables with non-UTF8 -// paths. 
-fn inferred_lib_target(name: &str, layout: &Layout) -> Option { - layout.lib.as_ref().map(|lib| { - TomlTarget { - name: Some(name.to_string()), - path: Some(PathValue::Path(lib.clone())), - .. TomlTarget::new() - } - }) -} - -fn inferred_bin_targets(name: &str, layout: &Layout) -> Vec { - layout.bins.iter().filter_map(|bin| { - let name = if &**bin == Path::new("src/main.rs") || - *bin == layout.root.join("src").join("main.rs") { - Some(name.to_string()) - } else { - bin.file_stem().and_then(|s| s.to_str()).map(|f| f.to_string()) - }; - - name.map(|name| { - TomlTarget { - name: Some(name), - path: Some(PathValue::Path(bin.clone())), - .. TomlTarget::new() - } - }) - }).collect() -} - -fn inferred_example_targets(layout: &Layout) -> Vec { - layout.examples.iter().filter_map(|ex| { - ex.file_stem().and_then(|s| s.to_str()).map(|name| { - TomlTarget { - name: Some(name.to_string()), - path: Some(PathValue::Path(ex.clone())), - .. TomlTarget::new() - } - }) - }).collect() -} - -fn inferred_test_targets(layout: &Layout) -> Vec { - layout.tests.iter().filter_map(|ex| { - ex.file_stem().and_then(|s| s.to_str()).map(|name| { - TomlTarget { - name: Some(name.to_string()), - path: Some(PathValue::Path(ex.clone())), - .. TomlTarget::new() - } - }) - }).collect() -} - -fn inferred_bench_targets(layout: &Layout) -> Vec { - layout.benches.iter().filter_map(|ex| { - ex.file_stem().and_then(|s| s.to_str()).map(|name| { - TomlTarget { - name: Some(name.to_string()), - path: Some(PathValue::Path(ex.clone())), - .. TomlTarget::new() - } - }) - }).collect() -} - -impl TomlManifest { - pub fn to_manifest(&self, source_id: &SourceId, layout: &Layout, - config: &Config) - -> CargoResult<(Manifest, Vec)> { - let mut nested_paths = vec!(); - let mut warnings = vec!(); - - let project = self.project.as_ref().or_else(|| self.package.as_ref()); - let project = try!(project.chain_error(|| { - human("No `package` or `project` section found.") - })); - - if project.name.trim().is_empty() { - return Err(human("package name cannot be an empty string.")) - } - - let pkgid = try!(project.to_package_id(source_id)); - let metadata = pkgid.generate_metadata(&layout.root); - - // If we have no lib at all, use the inferred lib if available - // If we have a lib with a path, we're done - // If we have a lib with no path, use the inferred lib or_else package name - - let lib = match self.lib { - Some(ref lib) => { - try!(validate_library_name(lib)); - Some( - TomlTarget { - name: lib.name.clone().or(Some(project.name.clone())), - path: lib.path.clone().or( - layout.lib.as_ref().map(|p| PathValue::Path(p.clone())) - ), - ..lib.clone() - } - ) - } - None => inferred_lib_target(&project.name, layout), - }; - - let bins = match self.bin { - Some(ref bins) => { - let bin = layout.main(); - - for target in bins { - try!(validate_binary_name(target)); - } - - bins.iter().map(|t| { - if bin.is_some() && t.path.is_none() { - TomlTarget { - path: bin.as_ref().map(|&p| PathValue::Path(p.clone())), - .. 
t.clone() - } - } else { - t.clone() - } - }).collect() - } - None => inferred_bin_targets(&project.name, layout) - }; - - let blacklist = vec!["build", "deps", "examples", "native"]; - - for bin in bins.iter() { - if blacklist.iter().find(|&x| *x == bin.name()) != None { - return Err(human(&format!("the binary target name `{}` is \ - forbidden", bin.name()))); - } - } - - let examples = match self.example { - Some(ref examples) => { - for target in examples { - try!(validate_example_name(target)); - } - examples.clone() - } - None => inferred_example_targets(layout) - }; - - let tests = match self.test { - Some(ref tests) => { - for target in tests { - try!(validate_test_name(target)); - } - tests.clone() - } - None => inferred_test_targets(layout) - }; - - let benches = match self.bench { - Some(ref benches) => { - for target in benches { - try!(validate_bench_name(target)); - } - benches.clone() - } - None => inferred_bench_targets(layout) - }; - - // processing the custom build script - let new_build = project.build.as_ref().map(PathBuf::from); - - // Get targets - let targets = normalize(&lib, - &bins, - new_build, - &examples, - &tests, - &benches, - &metadata, - &mut warnings); - - if targets.is_empty() { - debug!("manifest has no build targets"); - } - - let mut deps = Vec::new(); - - { - - let mut cx = Context { - deps: &mut deps, - source_id: source_id, - nested_paths: &mut nested_paths, - config: config, - }; - - // Collect the deps - try!(process_dependencies(&mut cx, self.dependencies.as_ref(), - |dep| dep)); - try!(process_dependencies(&mut cx, self.dev_dependencies.as_ref(), - |dep| dep.set_kind(Kind::Development))); - try!(process_dependencies(&mut cx, self.build_dependencies.as_ref(), - |dep| dep.set_kind(Kind::Build))); - - if let Some(targets) = self.target.as_ref() { - for (name, platform) in targets.iter() { - try!(process_dependencies(&mut cx, - platform.dependencies.as_ref(), - |dep| { - dep.set_only_for_platform(Some(name.clone())) - })); - try!(process_dependencies(&mut cx, - platform.build_dependencies.as_ref(), - |dep| { - dep.set_only_for_platform(Some(name.clone())) - .set_kind(Kind::Build) - })); - try!(process_dependencies(&mut cx, - platform.dev_dependencies.as_ref(), - |dep| { - dep.set_only_for_platform(Some(name.clone())) - .set_kind(Kind::Development) - })); - } - } - } - - let exclude = project.exclude.clone().unwrap_or(Vec::new()); - let include = project.include.clone().unwrap_or(Vec::new()); - - let summary = try!(Summary::new(pkgid, deps, - self.features.clone() - .unwrap_or(HashMap::new()))); - let metadata = ManifestMetadata { - description: project.description.clone(), - homepage: project.homepage.clone(), - documentation: project.documentation.clone(), - readme: project.readme.clone(), - authors: project.authors.clone(), - license: project.license.clone(), - license_file: project.license_file.clone(), - repository: project.repository.clone(), - keywords: project.keywords.clone().unwrap_or(Vec::new()), - }; - let profiles = build_profiles(&self.profile); - let mut manifest = Manifest::new(summary, - targets, - exclude, - include, - project.links.clone(), - metadata, - profiles); - if project.license_file.is_some() && project.license.is_some() { - manifest.add_warning(format!("warning: only one of `license` or \ - `license-file` is necessary")); - } - for warning in warnings { - manifest.add_warning(warning.clone()); - } - - Ok((manifest, nested_paths)) - } -} - -fn validate_library_name(target: &TomlTarget) -> CargoResult<()> { - match 
target.name { - Some(ref name) => { - if name.trim().is_empty() { - Err(human(format!("library target names cannot be empty."))) - } else if name.contains("-") { - Err(human(format!("library target names cannot contain hyphens: {}", - name))) - } else { - Ok(()) - } - }, - None => Ok(()) - } -} - -fn validate_binary_name(target: &TomlTarget) -> CargoResult<()> { - match target.name { - Some(ref name) => { - if name.trim().is_empty() { - Err(human(format!("binary target names cannot be empty."))) - } else { - Ok(()) - } - }, - None => Err(human(format!("binary target bin.name is required"))) - } -} - -fn validate_example_name(target: &TomlTarget) -> CargoResult<()> { - match target.name { - Some(ref name) => { - if name.trim().is_empty() { - Err(human(format!("example target names cannot be empty"))) - } else { - Ok(()) - } - }, - None => Err(human(format!("example target example.name is required"))) - } -} - -fn validate_test_name(target: &TomlTarget) -> CargoResult<()> { - match target.name { - Some(ref name) => { - if name.trim().is_empty() { - Err(human(format!("test target names cannot be empty"))) - } else { - Ok(()) - } - }, - None => Err(human(format!("test target test.name is required"))) - } -} - -fn validate_bench_name(target: &TomlTarget) -> CargoResult<()> { - match target.name { - Some(ref name) => { - if name.trim().is_empty() { - Err(human(format!("bench target names cannot be empty"))) - } else { - Ok(()) - } - }, - None => Err(human(format!("bench target bench.name is required"))) - } -} - -fn process_dependencies(cx: &mut Context, - new_deps: Option<&HashMap>, - mut f: F) -> CargoResult<()> - where F: FnMut(DependencyInner) -> DependencyInner -{ - let dependencies = match new_deps { - Some(ref dependencies) => dependencies, - None => return Ok(()) - }; - for (n, v) in dependencies.iter() { - let details = match *v { - TomlDependency::Simple(ref version) => { - let mut d: DetailedTomlDependency = Default::default(); - d.version = Some(version.clone()); - d - } - TomlDependency::Detailed(ref details) => details.clone(), - }; - let reference = details.branch.clone().map(GitReference::Branch) - .or_else(|| details.tag.clone().map(GitReference::Tag)) - .or_else(|| details.rev.clone().map(GitReference::Rev)) - .unwrap_or_else(|| GitReference::Branch("master".to_string())); - - let new_source_id = match details.git { - Some(ref git) => { - let loc = try!(git.to_url().map_err(|e| { - human(e) - })); - Some(SourceId::for_git(&loc, reference)) - } - None => { - details.path.as_ref().map(|path| { - cx.nested_paths.push(PathBuf::from(path)); - cx.source_id.clone() - }) - } - }.unwrap_or(try!(SourceId::for_central(cx.config))); - - let dep = try!(DependencyInner::parse(&n, - details.version.as_ref() - .map(|v| &v[..]), - &new_source_id)); - let dep = f(dep) - .set_features(details.features.unwrap_or(Vec::new())) - .set_default_features(details.default_features.unwrap_or(true)) - .set_optional(details.optional.unwrap_or(false)) - .into_dependency(); - cx.deps.push(dep); - } - - Ok(()) -} - -#[derive(RustcDecodable, Debug, Clone)] -struct TomlTarget { - name: Option, - crate_type: Option>, - path: Option, - test: Option, - doctest: Option, - bench: Option, - doc: Option, - plugin: Option, - harness: Option, -} - -#[derive(RustcDecodable, Clone)] -enum PathValue { - String(String), - Path(PathBuf), -} - -/// Corresponds to a `target` entry, but `TomlTarget` is already used. 
-#[derive(RustcDecodable)] -struct TomlPlatform { - dependencies: Option>, - build_dependencies: Option>, - dev_dependencies: Option>, -} - -impl TomlTarget { - fn new() -> TomlTarget { - TomlTarget { - name: None, - crate_type: None, - path: None, - test: None, - doctest: None, - bench: None, - doc: None, - plugin: None, - harness: None, - } - } - - fn name(&self) -> String { - match self.name { - Some(ref name) => name.clone(), - None => panic!("target name is required") - } - } -} - -impl PathValue { - fn to_path(&self) -> PathBuf { - match *self { - PathValue::String(ref s) => PathBuf::from(s), - PathValue::Path(ref p) => p.clone(), - } - } -} - -impl fmt::Debug for PathValue { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - PathValue::String(ref s) => s.fmt(f), - PathValue::Path(ref p) => p.display().fmt(f), - } - } -} - -fn normalize(lib: &Option, - bins: &[TomlBinTarget], - custom_build: Option, - examples: &[TomlExampleTarget], - tests: &[TomlTestTarget], - benches: &[TomlBenchTarget], - metadata: &Metadata, - warnings: &mut Vec) -> Vec { - fn configure(toml: &TomlTarget, target: &mut Target) { - let t2 = target.clone(); - target.set_tested(toml.test.unwrap_or(t2.tested())) - .set_doc(toml.doc.unwrap_or(t2.documented())) - .set_doctest(toml.doctest.unwrap_or(t2.doctested())) - .set_benched(toml.bench.unwrap_or(t2.benched())) - .set_harness(toml.harness.unwrap_or(t2.harness())) - .set_for_host(toml.plugin.unwrap_or(t2.for_host())); - } - - fn lib_target(dst: &mut Vec, - l: &TomlLibTarget, - metadata: &Metadata, - warnings: &mut Vec) { - let path = l.path.clone().unwrap_or( - PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name()))) - ); - let crate_types = match l.crate_type.clone() { - Some(kinds) => { - // For now, merely warn about invalid crate types. - // In the future, it might be nice to make them errors. 
- kinds.iter().filter_map(|s| { - let kind = LibKind::from_str(s); - if let Err(ref error) = kind { - warnings.push(format!("warning: {}", error)) - } - kind.ok() - }).collect() - } - None => { - vec![ if l.plugin == Some(true) {LibKind::Dylib} - else {LibKind::Lib} ] - } - }; - - let mut target = Target::lib_target(&l.name(), crate_types, - &path.to_path(), - metadata.clone()); - configure(l, &mut target); - dst.push(target); - } - - fn bin_targets(dst: &mut Vec, bins: &[TomlBinTarget], - default: &mut FnMut(&TomlBinTarget) -> PathBuf) { - for bin in bins.iter() { - let path = bin.path.clone().unwrap_or_else(|| { - PathValue::Path(default(bin)) - }); - let mut target = Target::bin_target(&bin.name(), &path.to_path(), - None); - configure(bin, &mut target); - dst.push(target); - } - } - - fn custom_build_target(dst: &mut Vec, cmd: &Path) { - let name = format!("build-script-{}", - cmd.file_stem().and_then(|s| s.to_str()).unwrap_or("")); - - dst.push(Target::custom_build_target(&name, cmd, None)); - } - - fn example_targets(dst: &mut Vec, - examples: &[TomlExampleTarget], - default: &mut FnMut(&TomlExampleTarget) -> PathBuf) { - for ex in examples.iter() { - let path = ex.path.clone().unwrap_or_else(|| { - PathValue::Path(default(ex)) - }); - - let mut target = Target::example_target(&ex.name(), &path.to_path()); - configure(ex, &mut target); - dst.push(target); - } - } - - fn test_targets(dst: &mut Vec, tests: &[TomlTestTarget], - metadata: &Metadata, - default: &mut FnMut(&TomlTestTarget) -> PathBuf) { - for test in tests.iter() { - let path = test.path.clone().unwrap_or_else(|| { - PathValue::Path(default(test)) - }); - - // make sure this metadata is different from any same-named libs. - let mut metadata = metadata.clone(); - metadata.mix(&format!("test-{}", test.name())); - - let mut target = Target::test_target(&test.name(), &path.to_path(), - metadata); - configure(test, &mut target); - dst.push(target); - } - } - - fn bench_targets(dst: &mut Vec, benches: &[TomlBenchTarget], - metadata: &Metadata, - default: &mut FnMut(&TomlBenchTarget) -> PathBuf) { - for bench in benches.iter() { - let path = bench.path.clone().unwrap_or_else(|| { - PathValue::Path(default(bench)) - }); - - // make sure this metadata is different from any same-named libs. 
- let mut metadata = metadata.clone(); - metadata.mix(&format!("bench-{}", bench.name())); - - let mut target = Target::bench_target(&bench.name(), - &path.to_path(), - metadata); - configure(bench, &mut target); - dst.push(target); - } - } - - let mut ret = Vec::new(); - - if let Some(ref lib) = *lib { - lib_target(&mut ret, lib, metadata, warnings); - bin_targets(&mut ret, bins, - &mut |bin| Path::new("src").join("bin") - .join(&format!("{}.rs", bin.name()))); - } else if bins.len() > 0 { - bin_targets(&mut ret, bins, - &mut |bin| Path::new("src") - .join(&format!("{}.rs", bin.name()))); - } - - if let Some(custom_build) = custom_build { - custom_build_target(&mut ret, &custom_build); - } - - example_targets(&mut ret, examples, - &mut |ex| Path::new("examples") - .join(&format!("{}.rs", ex.name()))); - - test_targets(&mut ret, tests, metadata, &mut |test| { - if test.name() == "test" { - Path::new("src").join("test.rs") - } else { - Path::new("tests").join(&format!("{}.rs", test.name())) - } - }); - - bench_targets(&mut ret, benches, metadata, &mut |bench| { - if bench.name() == "bench" { - Path::new("src").join("bench.rs") - } else { - Path::new("benches").join(&format!("{}.rs", bench.name())) - } - }); - - ret -} - -fn build_profiles(profiles: &Option) -> Profiles { - let profiles = profiles.as_ref(); - return Profiles { - release: merge(Profile::default_release(), - profiles.and_then(|p| p.release.as_ref())), - dev: merge(Profile::default_dev(), - profiles.and_then(|p| p.dev.as_ref())), - test: merge(Profile::default_test(), - profiles.and_then(|p| p.test.as_ref())), - bench: merge(Profile::default_bench(), - profiles.and_then(|p| p.bench.as_ref())), - doc: merge(Profile::default_doc(), - profiles.and_then(|p| p.doc.as_ref())), - }; - - fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile { - let &TomlProfile { - opt_level, lto, codegen_units, debug, debug_assertions, rpath - } = match toml { - Some(toml) => toml, - None => return profile, - }; - Profile { - opt_level: opt_level.unwrap_or(profile.opt_level), - lto: lto.unwrap_or(profile.lto), - codegen_units: codegen_units, - rustc_args: None, - debuginfo: debug.unwrap_or(profile.debuginfo), - debug_assertions: debug_assertions.unwrap_or(profile.debug_assertions), - rpath: rpath.unwrap_or(profile.rpath), - test: profile.test, - doc: profile.doc, - } - } -} diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs new file mode 100644 index 00000000000..3633ff25b13 --- /dev/null +++ b/src/cargo/util/toml/mod.rs @@ -0,0 +1,1604 @@ +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; +use std::fmt; +use std::fs; +use std::path::{Path, PathBuf}; +use std::rc::Rc; +use std::str; + +use failure::bail; +use log::{debug, trace}; +use semver::{self, VersionReq}; +use serde::de; +use serde::ser; +use serde::{Deserialize, Serialize}; +use url::Url; + +use crate::core::dependency::Kind; +use crate::core::manifest::{LibKind, ManifestMetadata, TargetSourcePath, Warnings}; +use crate::core::profiles::Profiles; +use crate::core::{Dependency, Manifest, PackageId, Summary, Target}; +use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest}; +use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig}; +use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY}; +use crate::util::errors::{CargoResult, CargoResultExt, ManifestError}; +use crate::util::{self, paths, validate_package_name, Config, IntoUrl, Platform}; + +mod targets; +use self::targets::targets; + +pub 
fn read_manifest( + path: &Path, + source_id: SourceId, + config: &Config, +) -> Result<(EitherManifest, Vec), ManifestError> { + trace!( + "read_manifest; path={}; source-id={}", + path.display(), + source_id + ); + let contents = paths::read(path).map_err(|err| ManifestError::new(err, path.into()))?; + + do_read_manifest(&contents, path, source_id, config) + .chain_err(|| format!("failed to parse manifest at `{}`", path.display())) + .map_err(|err| ManifestError::new(err, path.into())) +} + +fn do_read_manifest( + contents: &str, + manifest_file: &Path, + source_id: SourceId, + config: &Config, +) -> CargoResult<(EitherManifest, Vec)> { + let package_root = manifest_file.parent().unwrap(); + + let toml = { + let pretty_filename = manifest_file + .strip_prefix(config.cwd()) + .unwrap_or(manifest_file); + parse(contents, pretty_filename, config)? + }; + + let mut unused = BTreeSet::new(); + let manifest: TomlManifest = serde_ignored::deserialize(toml, |path| { + let mut key = String::new(); + stringify(&mut key, &path); + unused.insert(key); + })?; + let add_unused = |warnings: &mut Warnings| { + for key in unused { + warnings.add_warning(format!("unused manifest key: {}", key)); + if key == "profile.debug" || key == "profiles.debug" { + warnings.add_warning("use `[profile.dev]` to configure debug builds".to_string()); + } + } + }; + + let manifest = Rc::new(manifest); + return if manifest.project.is_some() || manifest.package.is_some() { + let (mut manifest, paths) = + TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?; + add_unused(manifest.warnings_mut()); + if !manifest.targets().iter().any(|t| !t.is_custom_build()) { + bail!( + "no targets specified in the manifest\n \ + either src/lib.rs, src/main.rs, a [lib] section, or \ + [[bin]] section must be present" + ) + } + Ok((EitherManifest::Real(manifest), paths)) + } else { + let (mut m, paths) = + TomlManifest::to_virtual_manifest(&manifest, source_id, package_root, config)?; + add_unused(m.warnings_mut()); + Ok((EitherManifest::Virtual(m), paths)) + }; + + fn stringify(dst: &mut String, path: &serde_ignored::Path<'_>) { + use serde_ignored::Path; + + match *path { + Path::Root => {} + Path::Seq { parent, index } => { + stringify(dst, parent); + if !dst.is_empty() { + dst.push('.'); + } + dst.push_str(&index.to_string()); + } + Path::Map { parent, ref key } => { + stringify(dst, parent); + if !dst.is_empty() { + dst.push('.'); + } + dst.push_str(key); + } + Path::Some { parent } + | Path::NewtypeVariant { parent } + | Path::NewtypeStruct { parent } => stringify(dst, parent), + } + } +} + +pub fn parse(toml: &str, file: &Path, config: &Config) -> CargoResult { + let first_error = match toml.parse() { + Ok(ret) => return Ok(ret), + Err(e) => e, + }; + + let mut second_parser = toml::de::Deserializer::new(toml); + second_parser.set_require_newline_after_table(false); + if let Ok(ret) = toml::Value::deserialize(&mut second_parser) { + let msg = format!( + "\ +TOML file found which contains invalid syntax and will soon not parse +at `{}`. + +The TOML spec requires newlines after table definitions (e.g., `[a] b = 1` is +invalid), but this file has a table header which does not have a newline after +it. 
A newline needs to be added and this warning will soon become a hard error +in the future.", + file.display() + ); + config.shell().warn(&msg)?; + return Ok(ret); + } + + let mut third_parser = toml::de::Deserializer::new(toml); + third_parser.set_allow_duplicate_after_longer_table(true); + if let Ok(ret) = toml::Value::deserialize(&mut third_parser) { + let msg = format!( + "\ +TOML file found which contains invalid syntax and will soon not parse +at `{}`. + +The TOML spec requires that each table header is defined at most once, but +historical versions of Cargo have erroneously accepted this file. The table +definitions will need to be merged together with one table header to proceed, +and this will become a hard error in the future.", + file.display() + ); + config.shell().warn(&msg)?; + return Ok(ret); + } + + let first_error = failure::Error::from(first_error); + Err(first_error.context("could not parse input as TOML").into()) +} + +type TomlLibTarget = TomlTarget; +type TomlBinTarget = TomlTarget; +type TomlExampleTarget = TomlTarget; +type TomlTestTarget = TomlTarget; +type TomlBenchTarget = TomlTarget; + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum TomlDependency { + Simple(String), + Detailed(DetailedTomlDependency), +} + +impl<'de> de::Deserialize<'de> for TomlDependency { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct TomlDependencyVisitor; + + impl<'de> de::Visitor<'de> for TomlDependencyVisitor { + type Value = TomlDependency; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str( + "a version string like \"0.9.8\" or a \ + detailed dependency like { version = \"0.9.8\" }", + ) + } + + fn visit_str(self, s: &str) -> Result + where + E: de::Error, + { + Ok(TomlDependency::Simple(s.to_owned())) + } + + fn visit_map(self, map: V) -> Result + where + V: de::MapAccess<'de>, + { + let mvd = de::value::MapAccessDeserializer::new(map); + DetailedTomlDependency::deserialize(mvd).map(TomlDependency::Detailed) + } + } + + deserializer.deserialize_any(TomlDependencyVisitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default)] +#[serde(rename_all = "kebab-case")] +pub struct DetailedTomlDependency { + version: Option, + registry: Option, + /// The URL of the `registry` field. + /// This is an internal implementation detail. When Cargo creates a + /// package, it replaces `registry` with `registry-index` so that the + /// manifest contains the correct URL. All users won't have the same + /// registry names configured, so Cargo can't rely on just the name for + /// crates published by other users. 
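+    // For illustration (registry name and index URL invented): a dependency
+    // written as
+    //   foo = { version = "1.0", registry = "my-registry" }
+    // is rewritten on publish to
+    //   foo = { version = "1.0", registry-index = "https://my-registry.example/index" }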
+    registry_index: Option<String>,
+    path: Option<String>,
+    git: Option<String>,
+    branch: Option<String>,
+    tag: Option<String>,
+    rev: Option<String>,
+    features: Option<Vec<String>>,
+    optional: Option<bool>,
+    default_features: Option<bool>,
+    #[serde(rename = "default_features")]
+    default_features2: Option<bool>,
+    package: Option<String>,
+    public: Option<bool>,
+}
+
+#[derive(Debug, Deserialize, Serialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct TomlManifest {
+    cargo_features: Option<Vec<String>>,
+    package: Option<Box<TomlProject>>,
+    project: Option<Box<TomlProject>>,
+    profile: Option<TomlProfiles>,
+    lib: Option<TomlLibTarget>,
+    bin: Option<Vec<TomlBinTarget>>,
+    example: Option<Vec<TomlExampleTarget>>,
+    test: Option<Vec<TomlTestTarget>>,
+    bench: Option<Vec<TomlBenchTarget>>,
+    dependencies: Option<BTreeMap<String, TomlDependency>>,
+    dev_dependencies: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "dev_dependencies")]
+    dev_dependencies2: Option<BTreeMap<String, TomlDependency>>,
+    build_dependencies: Option<BTreeMap<String, TomlDependency>>,
+    #[serde(rename = "build_dependencies")]
+    build_dependencies2: Option<BTreeMap<String, TomlDependency>>,
+    features: Option<BTreeMap<String, Vec<String>>>,
+    target: Option<BTreeMap<String, TomlPlatform>>,
+    replace: Option<BTreeMap<String, TomlDependency>>,
+    patch: Option<BTreeMap<String, BTreeMap<String, TomlDependency>>>,
+    workspace: Option<TomlWorkspace>,
+    badges: Option<BTreeMap<String, BTreeMap<String, String>>>,
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug, Default)]
+pub struct TomlProfiles {
+    pub test: Option<TomlProfile>,
+    pub doc: Option<TomlProfile>,
+    pub bench: Option<TomlProfile>,
+    pub dev: Option<TomlProfile>,
+    pub release: Option<TomlProfile>,
+}
+
+impl TomlProfiles {
+    pub fn validate(&self, features: &Features, warnings: &mut Vec<String>) -> CargoResult<()> {
+        if let Some(ref test) = self.test {
+            test.validate("test", features, warnings)?;
+        }
+        if let Some(ref doc) = self.doc {
+            doc.validate("doc", features, warnings)?;
+        }
+        if let Some(ref bench) = self.bench {
+            bench.validate("bench", features, warnings)?;
+        }
+        if let Some(ref dev) = self.dev {
+            dev.validate("dev", features, warnings)?;
+        }
+        if let Some(ref release) = self.release {
+            release.validate("release", features, warnings)?;
+        }
+        Ok(())
+    }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct TomlOptLevel(pub String);
+
+impl<'de> de::Deserialize<'de> for TomlOptLevel {
+    fn deserialize<D>(d: D) -> Result<TomlOptLevel, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl<'de> de::Visitor<'de> for Visitor {
+            type Value = TomlOptLevel;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                formatter.write_str("an optimization level")
+            }
+
+            fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(TomlOptLevel(value.to_string()))
+            }
+
+            fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                if value == "s" || value == "z" {
+                    Ok(TomlOptLevel(value.to_string()))
+                } else {
+                    Err(E::custom(format!(
+                        "must be an integer, `z`, or `s`, \
+                         but found: {}",
+                        value
+                    )))
+                }
+            }
+        }
+
+        d.deserialize_any(Visitor)
+    }
+}
+
+impl ser::Serialize for TomlOptLevel {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: ser::Serializer,
+    {
+        match self.0.parse::<u32>() {
+            Ok(n) => n.serialize(serializer),
+            Err(_) => self.0.serialize(serializer),
+        }
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Eq, PartialEq)]
+#[serde(untagged)]
+pub enum U32OrBool {
+    U32(u32),
+    Bool(bool),
+}
+
+impl<'de> de::Deserialize<'de> for U32OrBool {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: de::Deserializer<'de>,
+    {
+        struct Visitor;
+
+        impl<'de> de::Visitor<'de> for Visitor {
+            type Value = U32OrBool;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+                formatter.write_str("a boolean or an integer")
+            }
+
+            fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(U32OrBool::Bool(b))
+            }
+
+            fn visit_i64<E>(self, u: i64) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(U32OrBool::U32(u as u32))
+            }
+
+            fn visit_u64<E>(self, u: u64) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(U32OrBool::U32(u as u32))
+            }
+        }
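+        // `deserialize_any` lets the concrete TOML value pick the variant:
+        // e.g. `debug = true` becomes `Bool`, while `debug = 2` becomes `U32`.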
+ + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Deserialize, Serialize, Clone, Debug, Default, Eq, PartialEq)] +#[serde(rename_all = "kebab-case")] +pub struct TomlProfile { + pub opt_level: Option, + pub lto: Option, + pub codegen_units: Option, + pub debug: Option, + pub debug_assertions: Option, + pub rpath: Option, + pub panic: Option, + pub overflow_checks: Option, + pub incremental: Option, + pub overrides: Option>, + pub build_override: Option>, +} + +#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] +pub enum ProfilePackageSpec { + Spec(PackageIdSpec), + All, +} + +impl ser::Serialize for ProfilePackageSpec { + fn serialize(&self, s: S) -> Result + where + S: ser::Serializer, + { + match *self { + ProfilePackageSpec::Spec(ref spec) => spec.serialize(s), + ProfilePackageSpec::All => "*".serialize(s), + } + } +} + +impl<'de> de::Deserialize<'de> for ProfilePackageSpec { + fn deserialize(d: D) -> Result + where + D: de::Deserializer<'de>, + { + let string = String::deserialize(d)?; + if string == "*" { + Ok(ProfilePackageSpec::All) + } else { + PackageIdSpec::parse(&string) + .map_err(de::Error::custom) + .map(ProfilePackageSpec::Spec) + } + } +} + +impl TomlProfile { + pub fn validate( + &self, + name: &str, + features: &Features, + warnings: &mut Vec, + ) -> CargoResult<()> { + if let Some(ref profile) = self.build_override { + features.require(Feature::profile_overrides())?; + profile.validate_override()?; + } + if let Some(ref override_map) = self.overrides { + features.require(Feature::profile_overrides())?; + for profile in override_map.values() { + profile.validate_override()?; + } + } + + match name { + "dev" | "release" => {} + _ => { + if self.overrides.is_some() || self.build_override.is_some() { + bail!( + "Profile overrides may only be specified for \ + `dev` or `release` profile, not `{}`.", + name + ); + } + } + } + + match name { + "doc" => { + warnings.push("profile `doc` is deprecated and has no effect".to_string()); + } + "test" | "bench" => { + if self.panic.is_some() { + warnings.push(format!("`panic` setting is ignored for `{}` profile", name)) + } + } + _ => {} + } + + if let Some(panic) = &self.panic { + if panic != "unwind" && panic != "abort" { + bail!( + "`panic` setting of `{}` is not a valid setting,\ + must be `unwind` or `abort`", + panic + ); + } + } + Ok(()) + } + + fn validate_override(&self) -> CargoResult<()> { + if self.overrides.is_some() || self.build_override.is_some() { + bail!("Profile overrides cannot be nested."); + } + if self.panic.is_some() { + bail!("`panic` may not be specified in a profile override.") + } + if self.lto.is_some() { + bail!("`lto` may not be specified in a profile override.") + } + if self.rpath.is_some() { + bail!("`rpath` may not be specified in a profile override.") + } + Ok(()) + } +} + +#[derive(Clone, Debug, Serialize, Eq, PartialEq)] +pub struct StringOrVec(Vec); + +impl<'de> de::Deserialize<'de> for StringOrVec { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = StringOrVec; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("string or list of strings") + } + + fn visit_str(self, s: &str) -> Result + where + E: de::Error, + { + Ok(StringOrVec(vec![s.to_string()])) + } + + fn visit_seq(self, v: V) -> Result + where + V: de::SeqAccess<'de>, + { + let seq = de::value::SeqAccessDeserializer::new(v); + 
Vec::deserialize(seq).map(StringOrVec) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Clone, Debug, Serialize, Eq, PartialEq)] +#[serde(untagged)] +pub enum StringOrBool { + String(String), + Bool(bool), +} + +impl<'de> de::Deserialize<'de> for StringOrBool { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = StringOrBool; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("a boolean or a string") + } + + fn visit_bool(self, b: bool) -> Result + where + E: de::Error, + { + Ok(StringOrBool::Bool(b)) + } + + fn visit_str(self, s: &str) -> Result + where + E: de::Error, + { + Ok(StringOrBool::String(s.to_string())) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +#[derive(Clone, Debug, Serialize)] +#[serde(untagged)] +pub enum VecStringOrBool { + VecString(Vec), + Bool(bool), +} + +impl<'de> de::Deserialize<'de> for VecStringOrBool { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + struct Visitor; + + impl<'de> de::Visitor<'de> for Visitor { + type Value = VecStringOrBool; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str("a boolean or vector of strings") + } + + fn visit_seq(self, v: V) -> Result + where + V: de::SeqAccess<'de>, + { + let seq = de::value::SeqAccessDeserializer::new(v); + Vec::deserialize(seq).map(VecStringOrBool::VecString) + } + + fn visit_bool(self, b: bool) -> Result + where + E: de::Error, + { + Ok(VecStringOrBool::Bool(b)) + } + } + + deserializer.deserialize_any(Visitor) + } +} + +/// Represents the `package`/`project` sections of a `Cargo.toml`. +/// +/// Note that the order of the fields matters, since this is the order they +/// are serialized to a TOML file. For example, you cannot have values after +/// the field `metadata`, since it is a table and values cannot appear after +/// tables. +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct TomlProject { + edition: Option, + name: String, + version: semver::Version, + authors: Option>, + build: Option, + metabuild: Option, + links: Option, + exclude: Option>, + include: Option>, + publish: Option, + #[serde(rename = "publish-lockfile")] + publish_lockfile: Option, + workspace: Option, + #[serde(rename = "im-a-teapot")] + im_a_teapot: Option, + autobins: Option, + autoexamples: Option, + autotests: Option, + autobenches: Option, + #[serde(rename = "namespaced-features")] + namespaced_features: Option, + #[serde(rename = "default-run")] + default_run: Option, + + // Package metadata. 
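+    // A hypothetical manifest exercising this section (all values invented):
+    //
+    //   [package]
+    //   name = "hello-world"
+    //   version = "0.1.0"
+    //   authors = ["someone@example.com"]
+    //   description = "An example package"
+    //   license = "MIT"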
+ description: Option, + homepage: Option, + documentation: Option, + readme: Option, + keywords: Option>, + categories: Option>, + license: Option, + #[serde(rename = "license-file")] + license_file: Option, + repository: Option, + metadata: Option, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct TomlWorkspace { + members: Option>, + #[serde(rename = "default-members")] + default_members: Option>, + exclude: Option>, +} + +impl TomlProject { + pub fn to_package_id(&self, source_id: SourceId) -> CargoResult { + PackageId::new(&self.name, self.version.clone(), source_id) + } +} + +struct Context<'a, 'b> { + pkgid: Option, + deps: &'a mut Vec, + source_id: SourceId, + nested_paths: &'a mut Vec, + config: &'b Config, + warnings: &'a mut Vec, + platform: Option, + root: &'a Path, + features: &'a Features, +} + +impl TomlManifest { + pub fn prepare_for_publish(&self, config: &Config) -> CargoResult { + let mut package = self + .package + .as_ref() + .or_else(|| self.project.as_ref()) + .unwrap() + .clone(); + package.workspace = None; + return Ok(TomlManifest { + package: Some(package), + project: None, + profile: self.profile.clone(), + lib: self.lib.clone(), + bin: self.bin.clone(), + example: self.example.clone(), + test: self.test.clone(), + bench: self.bench.clone(), + dependencies: map_deps(config, self.dependencies.as_ref())?, + dev_dependencies: map_deps( + config, + self.dev_dependencies + .as_ref() + .or_else(|| self.dev_dependencies2.as_ref()), + )?, + dev_dependencies2: None, + build_dependencies: map_deps( + config, + self.build_dependencies + .as_ref() + .or_else(|| self.build_dependencies2.as_ref()), + )?, + build_dependencies2: None, + features: self.features.clone(), + target: match self.target.as_ref().map(|target_map| { + target_map + .iter() + .map(|(k, v)| { + Ok(( + k.clone(), + TomlPlatform { + features: v.features.clone(), + dependencies: map_deps(config, v.dependencies.as_ref())?, + dev_dependencies: map_deps( + config, + v.dev_dependencies + .as_ref() + .or_else(|| v.dev_dependencies2.as_ref()), + )?, + dev_dependencies2: None, + build_dependencies: map_deps( + config, + v.build_dependencies + .as_ref() + .or_else(|| v.build_dependencies2.as_ref()), + )?, + build_dependencies2: None, + }, + )) + }) + .collect() + }) { + Some(Ok(v)) => Some(v), + Some(Err(e)) => return Err(e), + None => None, + }, + replace: None, + patch: None, + workspace: None, + badges: self.badges.clone(), + cargo_features: self.cargo_features.clone(), + }); + + fn map_deps( + config: &Config, + deps: Option<&BTreeMap>, + ) -> CargoResult>> { + let deps = match deps { + Some(deps) => deps, + None => return Ok(None), + }; + let deps = deps + .iter() + .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?))) + .collect::>>()?; + Ok(Some(deps)) + } + + fn map_dependency(config: &Config, dep: &TomlDependency) -> CargoResult { + match *dep { + TomlDependency::Detailed(ref d) => { + let mut d = d.clone(); + d.path.take(); // path dependencies become crates.io deps + // registry specifications are elaborated to the index URL + if let Some(registry) = d.registry.take() { + let src = SourceId::alt_registry(config, ®istry)?; + d.registry_index = Some(src.url().to_string()); + } + Ok(TomlDependency::Detailed(d)) + } + TomlDependency::Simple(ref s) => { + Ok(TomlDependency::Detailed(DetailedTomlDependency { + version: Some(s.clone()), + ..Default::default() + })) + } + } + } + } + + pub fn to_real_manifest( + me: &Rc, + source_id: SourceId, + package_root: &Path, + config: &Config, + ) -> 
CargoResult<(Manifest, Vec)> { + let mut nested_paths = vec![]; + let mut warnings = vec![]; + let mut errors = vec![]; + + // Parse features first so they will be available when parsing other parts of the TOML. + let empty = Vec::new(); + let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); + let features = Features::new(cargo_features, &mut warnings)?; + + let project = me.project.as_ref().or_else(|| me.package.as_ref()); + let project = project.ok_or_else(|| failure::format_err!("no `package` section found"))?; + + let package_name = project.name.trim(); + if package_name.is_empty() { + bail!("package name cannot be an empty string") + } + + validate_package_name(package_name, "package name", "")?; + + let pkgid = project.to_package_id(source_id)?; + + let edition = if let Some(ref edition) = project.edition { + features + .require(Feature::edition()) + .chain_err(|| "editions are unstable")?; + edition + .parse() + .chain_err(|| "failed to parse the `edition` key")? + } else { + Edition::Edition2015 + }; + + if project.metabuild.is_some() { + features.require(Feature::metabuild())?; + } + + // If we have no lib at all, use the inferred lib, if available. + // If we have a lib with a path, we're done. + // If we have a lib with no path, use the inferred lib or else the package name. + let targets = targets( + &features, + me, + package_name, + package_root, + edition, + &project.build, + &project.metabuild, + &mut warnings, + &mut errors, + )?; + + if targets.is_empty() { + debug!("manifest has no build targets"); + } + + if let Err(e) = unique_build_targets(&targets, package_root) { + warnings.push(format!( + "file found to be present in multiple \ + build targets: {}", + e + )); + } + + let mut deps = Vec::new(); + let mut ftrs = BTreeMap::new(); + let replace; + let patch; + + { + let mut cx = Context { + pkgid: Some(pkgid), + deps: &mut deps, + source_id, + nested_paths: &mut nested_paths, + config, + warnings: &mut warnings, + features: &features, + platform: None, + root: package_root, + }; + + fn process_dependencies( + cx: &mut Context<'_, '_>, + new_deps: Option<&BTreeMap>, + kind: Option, + ) -> CargoResult<()> { + let dependencies = match new_deps { + Some(dependencies) => dependencies, + None => return Ok(()), + }; + for (n, v) in dependencies.iter() { + let dep = v.to_dependency(n, cx, kind)?; + cx.deps.push(dep); + } + + Ok(()) + } + fn process_features( + ftrs: &mut BTreeMap, Vec)>, + new_ftrs: Option<&BTreeMap>>, + platform: Option<&Platform>, + ) -> CargoResult<()> { + let features = match new_ftrs { + Some(features) => features, + None => return Ok(()), + }; + for (n, v) in features.iter() { + ftrs.insert(n.clone(), (platform.cloned(), v.clone())); + } + + Ok(()) + } + + // Collect the dependencies. 
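+            // The same helper handles every table: `[dependencies]` first,
+            // then dev- and build-dependencies (in either the dashed or the
+            // historical underscore spelling), and finally the per-target
+            // `[target.'cfg(..)']` tables below.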
+ process_dependencies(&mut cx, me.dependencies.as_ref(), None)?; + let dev_deps = me + .dev_dependencies + .as_ref() + .or_else(|| me.dev_dependencies2.as_ref()); + process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + let build_deps = me + .build_dependencies + .as_ref() + .or_else(|| me.build_dependencies2.as_ref()); + process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + process_features(&mut ftrs, me.features.as_ref(), None)?; + + for (name, platform) in me.target.iter().flat_map(|t| t) { + cx.platform = Some(name.parse()?); + process_dependencies(&mut cx, platform.dependencies.as_ref(), None)?; + let build_deps = platform + .build_dependencies + .as_ref() + .or_else(|| platform.build_dependencies2.as_ref()); + process_dependencies(&mut cx, build_deps, Some(Kind::Build))?; + let dev_deps = platform + .dev_dependencies + .as_ref() + .or_else(|| platform.dev_dependencies2.as_ref()); + process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?; + process_features(&mut ftrs, platform.features.as_ref(), cx.platform.as_ref())?; + } + + replace = me.replace(&mut cx)?; + patch = me.patch(&mut cx)?; + } + + { + let mut names_sources = BTreeMap::new(); + for dep in &deps { + let name = dep.name_in_toml(); + let prev = names_sources.insert(name.to_string(), dep.source_id()); + if prev.is_some() && prev != Some(dep.source_id()) { + bail!( + "Dependency '{}' has different source paths depending on the build \ + target. Each dependency must have a single canonical source path \ + irrespective of build target.", + name + ); + } + } + } + + let exclude = project.exclude.clone().unwrap_or_default(); + let include = project.include.clone().unwrap_or_default(); + if project.namespaced_features.is_some() { + features.require(Feature::namespaced_features())?; + } + + let summary = Summary::new( + pkgid, + deps, + &ftrs, + project.links.as_ref().map(|x| x.as_str()), + project.namespaced_features.unwrap_or(false), + )?; + let metadata = ManifestMetadata { + description: project.description.clone(), + homepage: project.homepage.clone(), + documentation: project.documentation.clone(), + readme: project.readme.clone(), + authors: project.authors.clone().unwrap_or_default(), + license: project.license.clone(), + license_file: project.license_file.clone(), + repository: project.repository.clone(), + keywords: project.keywords.clone().unwrap_or_default(), + categories: project.categories.clone().unwrap_or_default(), + badges: me.badges.clone().unwrap_or_default(), + links: project.links.clone(), + }; + + let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) { + (Some(config), None) => WorkspaceConfig::Root(WorkspaceRootConfig::new( + package_root, + &config.members, + &config.default_members, + &config.exclude, + )), + (None, root) => WorkspaceConfig::Member { + root: root.cloned(), + }, + (Some(..), Some(..)) => bail!( + "cannot configure both `package.workspace` and \ + `[workspace]`, only one can be specified" + ), + }; + let profiles = Profiles::new(me.profile.as_ref(), config, &features, &mut warnings)?; + let publish = match project.publish { + Some(VecStringOrBool::VecString(ref vecstring)) => Some(vecstring.clone()), + Some(VecStringOrBool::Bool(false)) => Some(vec![]), + None | Some(VecStringOrBool::Bool(true)) => None, + }; + + let publish_lockfile = match project.publish_lockfile { + Some(b) => { + features.require(Feature::publish_lockfile())?; + warnings.push( + "The `publish-lockfile` feature is deprecated and currently \ + has no effect. 
It may be removed in a future version." + .to_string(), + ); + b + } + None => features.is_enabled(Feature::publish_lockfile()), + }; + + if summary.features().contains_key("default-features") { + warnings.push( + "`default-features = [\"..\"]` was found in [features]. \ + Did you mean to use `default = [\"..\"]`?" + .to_string(), + ) + } + + if let Some(run) = &project.default_run { + if !targets + .iter() + .filter(|t| t.is_bin()) + .any(|t| t.name() == run) + { + let suggestion = + util::closest_msg(run, targets.iter().filter(|t| t.is_bin()), |t| t.name()); + bail!("default-run target `{}` not found{}", run, suggestion); + } + } + + let custom_metadata = project.metadata.clone(); + let mut manifest = Manifest::new( + summary, + targets, + exclude, + include, + project.links.clone(), + metadata, + custom_metadata, + profiles, + publish, + publish_lockfile, + replace, + patch, + workspace_config, + features, + edition, + project.im_a_teapot, + project.default_run.clone(), + Rc::clone(me), + project.metabuild.clone().map(|sov| sov.0), + ); + if project.license_file.is_some() && project.license.is_some() { + manifest.warnings_mut().add_warning( + "only one of `license` or \ + `license-file` is necessary" + .to_string(), + ); + } + for warning in warnings { + manifest.warnings_mut().add_warning(warning); + } + for error in errors { + manifest.warnings_mut().add_critical_warning(error); + } + + manifest.feature_gate()?; + + Ok((manifest, nested_paths)) + } + + fn to_virtual_manifest( + me: &Rc, + source_id: SourceId, + root: &Path, + config: &Config, + ) -> CargoResult<(VirtualManifest, Vec)> { + if me.project.is_some() { + bail!("virtual manifests do not define [project]"); + } + if me.package.is_some() { + bail!("virtual manifests do not define [package]"); + } + if me.lib.is_some() { + bail!("virtual manifests do not specify [lib]"); + } + if me.bin.is_some() { + bail!("virtual manifests do not specify [[bin]]"); + } + if me.example.is_some() { + bail!("virtual manifests do not specify [[example]]"); + } + if me.test.is_some() { + bail!("virtual manifests do not specify [[test]]"); + } + if me.bench.is_some() { + bail!("virtual manifests do not specify [[bench]]"); + } + if me.dependencies.is_some() { + bail!("virtual manifests do not specify [dependencies]"); + } + if me.dev_dependencies.is_some() || me.dev_dependencies2.is_some() { + bail!("virtual manifests do not specify [dev-dependencies]"); + } + if me.build_dependencies.is_some() || me.build_dependencies2.is_some() { + bail!("virtual manifests do not specify [build-dependencies]"); + } + if me.features.is_some() { + bail!("virtual manifests do not specify [features]"); + } + if me.target.is_some() { + bail!("virtual manifests do not specify [target]"); + } + if me.badges.is_some() { + bail!("virtual manifests do not specify [badges]"); + } + + let mut nested_paths = Vec::new(); + let mut warnings = Vec::new(); + let mut deps = Vec::new(); + let empty = Vec::new(); + let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); + let features = Features::new(cargo_features, &mut warnings)?; + + let (replace, patch) = { + let mut cx = Context { + pkgid: None, + deps: &mut deps, + source_id, + nested_paths: &mut nested_paths, + config, + warnings: &mut warnings, + platform: None, + features: &features, + root, + }; + (me.replace(&mut cx)?, me.patch(&mut cx)?) 
+ }; + let profiles = Profiles::new(me.profile.as_ref(), config, &features, &mut warnings)?; + let workspace_config = match me.workspace { + Some(ref config) => WorkspaceConfig::Root(WorkspaceRootConfig::new( + root, + &config.members, + &config.default_members, + &config.exclude, + )), + None => { + bail!("virtual manifests must be configured with [workspace]"); + } + }; + Ok(( + VirtualManifest::new(replace, patch, workspace_config, profiles, features), + nested_paths, + )) + } + + fn replace(&self, cx: &mut Context<'_, '_>) -> CargoResult> { + if self.patch.is_some() && self.replace.is_some() { + bail!("cannot specify both [replace] and [patch]"); + } + let mut replace = Vec::new(); + for (spec, replacement) in self.replace.iter().flat_map(|x| x) { + let mut spec = PackageIdSpec::parse(spec).chain_err(|| { + format!( + "replacements must specify a valid semver \ + version to replace, but `{}` does not", + spec + ) + })?; + if spec.url().is_none() { + spec.set_url(CRATES_IO_INDEX.parse().unwrap()); + } + + let version_specified = match *replacement { + TomlDependency::Detailed(ref d) => d.version.is_some(), + TomlDependency::Simple(..) => true, + }; + if version_specified { + bail!( + "replacements cannot specify a version \ + requirement, but found one for `{}`", + spec + ); + } + + let mut dep = replacement.to_dependency(spec.name().as_str(), cx, None)?; + { + let version = spec.version().ok_or_else(|| { + failure::format_err!( + "replacements must specify a version \ + to replace, but `{}` does not", + spec + ) + })?; + dep.set_version_req(VersionReq::exact(version)); + } + replace.push((spec, dep)); + } + Ok(replace) + } + + fn patch(&self, cx: &mut Context<'_, '_>) -> CargoResult>> { + let mut patch = HashMap::new(); + for (url, deps) in self.patch.iter().flat_map(|x| x) { + let url = match &url[..] { + CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(), + _ => cx + .config + .get_registry_index(url) + .or_else(|_| url.into_url()) + .chain_err(|| { + format!("[patch] entry `{}` should be a URL or registry name", url) + })?, + }; + patch.insert( + url, + deps.iter() + .map(|(name, dep)| dep.to_dependency(name, cx, None)) + .collect::>>()?, + ); + } + Ok(patch) + } + + fn maybe_custom_build( + &self, + build: &Option, + package_root: &Path, + ) -> Option { + let build_rs = package_root.join("build.rs"); + match *build { + // Explicitly no build script. + Some(StringOrBool::Bool(false)) => None, + Some(StringOrBool::Bool(true)) => Some(build_rs), + Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)), + None => { + match fs::metadata(&build_rs) { + // If there is a `build.rs` file next to the `Cargo.toml`, assume it is + // a build script. + Ok(ref e) if e.is_file() => Some(build_rs), + Ok(_) | Err(_) => None, + } + } + } + } + + pub fn has_profiles(&self) -> bool { + self.profile.is_some() + } +} + +/// Checks a list of build targets, and ensures the target names are unique within a vector. +/// If not, the name of the offending build target is returned. 
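+// Uniqueness is keyed on the full source path (`package_root` joined with the
+// target's `src_path`), and the returned `Err` carries that path for the
+// "file found to be present in multiple build targets" warning above.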
+fn unique_build_targets(targets: &[Target], package_root: &Path) -> Result<(), String> { + let mut seen = HashSet::new(); + for target in targets { + if let TargetSourcePath::Path(path) = target.src_path() { + let full = package_root.join(path); + if !seen.insert(full.clone()) { + return Err(full.display().to_string()); + } + } + } + Ok(()) +} + +impl TomlDependency { + fn to_dependency( + &self, + name: &str, + cx: &mut Context<'_, '_>, + kind: Option, + ) -> CargoResult { + match *self { + TomlDependency::Simple(ref version) => DetailedTomlDependency { + version: Some(version.clone()), + ..Default::default() + } + .to_dependency(name, cx, kind), + TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind), + } + } +} + +impl DetailedTomlDependency { + fn to_dependency( + &self, + name_in_toml: &str, + cx: &mut Context<'_, '_>, + kind: Option, + ) -> CargoResult { + if self.version.is_none() && self.path.is_none() && self.git.is_none() { + let msg = format!( + "dependency ({}) specified without \ + providing a local path, Git repository, or \ + version to use. This will be considered an \ + error in future versions", + name_in_toml + ); + cx.warnings.push(msg); + } + + if let Some(version) = &self.version { + if version.contains('+') { + cx.warnings.push(format!( + "version requirement `{}` for dependency `{}` \ + includes semver metadata which will be ignored, removing the \ + metadata is recommended to avoid confusion", + version, name_in_toml + )); + } + } + + if self.git.is_none() { + let git_only_keys = [ + (&self.branch, "branch"), + (&self.tag, "tag"), + (&self.rev, "rev"), + ]; + + for &(key, key_name) in &git_only_keys { + if key.is_some() { + let msg = format!( + "key `{}` is ignored for dependency ({}). \ + This will be considered an error in future versions", + key_name, name_in_toml + ); + cx.warnings.push(msg) + } + } + } + + let new_source_id = match ( + self.git.as_ref(), + self.path.as_ref(), + self.registry.as_ref(), + self.registry_index.as_ref(), + ) { + (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!( + "dependency ({}) specification is ambiguous. \ + Only one of `git` or `registry` is allowed.", + name_in_toml + ), + (_, _, Some(_), Some(_)) => bail!( + "dependency ({}) specification is ambiguous. \ + Only one of `registry` or `registry-index` is allowed.", + name_in_toml + ), + (Some(git), maybe_path, _, _) => { + if maybe_path.is_some() { + let msg = format!( + "dependency ({}) specification is ambiguous. \ + Only one of `git` or `path` is allowed. \ + This will be considered an error in future versions", + name_in_toml + ); + cx.warnings.push(msg) + } + + let n_details = [&self.branch, &self.tag, &self.rev] + .iter() + .filter(|d| d.is_some()) + .count(); + + if n_details > 1 { + let msg = format!( + "dependency ({}) specification is ambiguous. \ + Only one of `branch`, `tag` or `rev` is allowed. \ + This will be considered an error in future versions", + name_in_toml + ); + cx.warnings.push(msg) + } + + let reference = self + .branch + .clone() + .map(GitReference::Branch) + .or_else(|| self.tag.clone().map(GitReference::Tag)) + .or_else(|| self.rev.clone().map(GitReference::Rev)) + .unwrap_or_else(|| GitReference::Branch("master".to_string())); + let loc = git.into_url()?; + SourceId::for_git(&loc, reference)? 
+ } + (None, Some(path), _, _) => { + cx.nested_paths.push(PathBuf::from(path)); + // If the source ID for the package we're parsing is a path + // source, then we normalize the path here to get rid of + // components like `..`. + // + // The purpose of this is to get a canonical ID for the package + // that we're depending on to ensure that builds of this package + // always end up hashing to the same value no matter where it's + // built from. + if cx.source_id.is_path() { + let path = cx.root.join(path); + let path = util::normalize_path(&path); + SourceId::for_path(&path)? + } else { + cx.source_id + } + } + (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?, + (None, None, None, Some(registry_index)) => { + let url = registry_index.into_url()?; + SourceId::for_registry(&url)? + } + (None, None, None, None) => SourceId::crates_io(cx.config)?, + }; + + let (pkg_name, explicit_name_in_toml) = match self.package { + Some(ref s) => (&s[..], Some(name_in_toml)), + None => (name_in_toml, None), + }; + + let version = self.version.as_ref().map(|v| &v[..]); + let mut dep = match cx.pkgid { + Some(id) => Dependency::parse(pkg_name, version, new_source_id, id, cx.config)?, + None => Dependency::parse_no_deprecated(pkg_name, version, new_source_id)?, + }; + dep.set_features(self.features.iter().flat_map(|x| x)) + .set_default_features( + self.default_features + .or(self.default_features2) + .unwrap_or(true), + ) + .set_optional(self.optional.unwrap_or(false)) + .set_platform(cx.platform.clone()); + if let Some(registry) = &self.registry { + let registry_id = SourceId::alt_registry(cx.config, registry)?; + dep.set_registry_id(registry_id); + } + if let Some(registry_index) = &self.registry_index { + let url = registry_index.into_url()?; + let registry_id = SourceId::for_registry(&url)?; + dep.set_registry_id(registry_id); + } + + if let Some(kind) = kind { + dep.set_kind(kind); + } + if let Some(name_in_toml) = explicit_name_in_toml { + cx.features.require(Feature::rename_dependency())?; + dep.set_explicit_name_in_toml(name_in_toml); + } + + if let Some(p) = self.public { + cx.features.require(Feature::public_dependency())?; + + if dep.kind() != Kind::Normal { + bail!("'public' specifier can only be used on regular dependencies, not {:?} dependencies", dep.kind()); + } + + dep.set_public(p); + } + Ok(dep) + } +} + +#[derive(Default, Serialize, Deserialize, Debug, Clone)] +struct TomlTarget { + name: Option, + + // The intention was to only accept `crate-type` here but historical + // versions of Cargo also accepted `crate_type`, so look for both. + #[serde(rename = "crate-type")] + crate_type: Option>, + #[serde(rename = "crate_type")] + crate_type2: Option>, + + path: Option, + test: Option, + doctest: Option, + bench: Option, + doc: Option, + plugin: Option, + #[serde(rename = "proc-macro")] + proc_macro: Option, + #[serde(rename = "proc_macro")] + proc_macro2: Option, + harness: Option, + #[serde(rename = "required-features")] + required_features: Option>, + edition: Option, +} + +#[derive(Clone)] +struct PathValue(PathBuf); + +impl<'de> de::Deserialize<'de> for PathValue { + fn deserialize(deserializer: D) -> Result + where + D: de::Deserializer<'de>, + { + Ok(PathValue(String::deserialize(deserializer)?.into())) + } +} + +impl ser::Serialize for PathValue { + fn serialize(&self, serializer: S) -> Result + where + S: ser::Serializer, + { + self.0.serialize(serializer) + } +} + +/// Corresponds to a `target` entry, but `TomlTarget` is already used. 
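+// Models tables such as (platform expression and version invented):
+//
+//   [target.'cfg(windows)'.dependencies]
+//   winapi = "0.3"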
+#[derive(Serialize, Deserialize, Debug)] +struct TomlPlatform { + features: Option>>, + dependencies: Option>, + #[serde(rename = "build-dependencies")] + build_dependencies: Option>, + #[serde(rename = "build_dependencies")] + build_dependencies2: Option>, + #[serde(rename = "dev-dependencies")] + dev_dependencies: Option>, + #[serde(rename = "dev_dependencies")] + dev_dependencies2: Option>, +} + +impl TomlTarget { + fn new() -> TomlTarget { + TomlTarget::default() + } + + fn name(&self) -> String { + match self.name { + Some(ref name) => name.clone(), + None => panic!("target name is required"), + } + } + + fn proc_macro(&self) -> Option { + self.proc_macro.or(self.proc_macro2).or_else(|| { + if let Some(types) = self.crate_types() { + if types.contains(&"proc-macro".to_string()) { + return Some(true); + } + } + None + }) + } + + fn crate_types(&self) -> Option<&Vec> { + self.crate_type + .as_ref() + .or_else(|| self.crate_type2.as_ref()) + } +} + +impl fmt::Debug for PathValue { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} diff --git a/src/cargo/util/toml/targets.rs b/src/cargo/util/toml/targets.rs new file mode 100644 index 00000000000..a834fad5e1f --- /dev/null +++ b/src/cargo/util/toml/targets.rs @@ -0,0 +1,823 @@ +//! This module implements Cargo conventions for directory layout: +//! +//! * `src/lib.rs` is a library +//! * `src/main.rs` is a binary +//! * `src/bin/*.rs` are binaries +//! * `examples/*.rs` are examples +//! * `tests/*.rs` are integration tests +//! * `benches/*.rs` are benchmarks +//! +//! It is a bit tricky because we need match explicit information from `Cargo.toml` +//! with implicit info in directory layout. + +use std::collections::HashSet; +use std::fs::{self, DirEntry}; +use std::path::{Path, PathBuf}; + +use super::{ + LibKind, PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, + TomlExampleTarget, TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget, +}; +use crate::core::{compiler, Edition, Feature, Features, Target}; +use crate::util::errors::{CargoResult, CargoResultExt}; + +pub fn targets( + features: &Features, + manifest: &TomlManifest, + package_name: &str, + package_root: &Path, + edition: Edition, + custom_build: &Option, + metabuild: &Option, + warnings: &mut Vec, + errors: &mut Vec, +) -> CargoResult> { + let mut targets = Vec::new(); + + let has_lib; + + if let Some(target) = clean_lib( + features, + manifest.lib.as_ref(), + package_root, + package_name, + edition, + warnings, + )? 
{ + targets.push(target); + has_lib = true; + } else { + has_lib = false; + } + + let package = manifest + .package + .as_ref() + .or_else(|| manifest.project.as_ref()) + .ok_or_else(|| failure::format_err!("manifest has no `package` (or `project`)"))?; + + targets.extend(clean_bins( + features, + manifest.bin.as_ref(), + package_root, + package_name, + edition, + package.autobins, + warnings, + errors, + has_lib, + )?); + + targets.extend(clean_examples( + features, + manifest.example.as_ref(), + package_root, + edition, + package.autoexamples, + warnings, + errors, + )?); + + targets.extend(clean_tests( + features, + manifest.test.as_ref(), + package_root, + edition, + package.autotests, + warnings, + errors, + )?); + + targets.extend(clean_benches( + features, + manifest.bench.as_ref(), + package_root, + edition, + package.autobenches, + warnings, + errors, + )?); + + // processing the custom build script + if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) { + if metabuild.is_some() { + failure::bail!("cannot specify both `metabuild` and `build`"); + } + let name = format!( + "build-script-{}", + custom_build + .file_stem() + .and_then(|s| s.to_str()) + .unwrap_or("") + ); + targets.push(Target::custom_build_target( + &name, + package_root.join(custom_build), + edition, + )); + } + if let Some(metabuild) = metabuild { + // Verify names match available build deps. + let bdeps = manifest.build_dependencies.as_ref(); + for name in &metabuild.0 { + if !bdeps.map_or(false, |bd| bd.contains_key(name)) { + failure::bail!( + "metabuild package `{}` must be specified in `build-dependencies`", + name + ); + } + } + + targets.push(Target::metabuild_target(&format!( + "metabuild-{}", + package.name + ))); + } + + Ok(targets) +} + +fn clean_lib( + features: &Features, + toml_lib: Option<&TomlLibTarget>, + package_root: &Path, + package_name: &str, + edition: Edition, + warnings: &mut Vec, +) -> CargoResult> { + let inferred = inferred_lib(package_root); + let lib = match toml_lib { + Some(lib) => { + if let Some(ref name) = lib.name { + // XXX: other code paths dodge this validation + if name.contains('-') { + failure::bail!("library target names cannot contain hyphens: {}", name) + } + } + Some(TomlTarget { + name: lib.name.clone().or_else(|| Some(package_name.to_owned())), + ..lib.clone() + }) + } + None => inferred.as_ref().map(|lib| TomlTarget { + name: Some(package_name.to_string()), + path: Some(PathValue(lib.clone())), + ..TomlTarget::new() + }), + }; + + let lib = match lib { + Some(ref lib) => lib, + None => return Ok(None), + }; + + validate_has_name(lib, "library", "lib")?; + + let path = match (lib.path.as_ref(), inferred) { + (Some(path), _) => package_root.join(&path.0), + (None, Some(path)) => path, + (None, None) => { + let legacy_path = package_root.join("src").join(format!("{}.rs", lib.name())); + if edition == Edition::Edition2015 && legacy_path.exists() { + warnings.push(format!( + "path `{}` was erroneously implicitly accepted for library `{}`,\n\ + please rename the file to `src/lib.rs` or set lib.path in Cargo.toml", + legacy_path.display(), + lib.name() + )); + legacy_path + } else { + failure::bail!( + "can't find library `{}`, \ + rename file to `src/lib.rs` or specify lib.path", + lib.name() + ) + } + } + }; + + // Per the Macros 1.1 RFC: + // + // > Initially if a crate is compiled with the `proc-macro` crate type + // > (and possibly others) it will forbid exporting any items in the + // > crate other than those functions tagged 
#[proc_macro_derive] and + // > those functions must also be placed at the crate root. + // + // A plugin requires exporting plugin_registrar so a crate cannot be + // both at once. + let crate_types = match (lib.crate_types(), lib.plugin, lib.proc_macro()) { + (Some(kinds), _, _) if kinds.contains(&"proc-macro".to_string()) => { + if let Some(true) = lib.plugin { + // This is a warning to retain backwards compatibility. + warnings.push(format!( + "proc-macro library `{}` should not specify `plugin = true`", + lib.name() + )); + } + warnings.push(format!( + "library `{}` should only specify `proc-macro = true` instead of setting `crate-type`", + lib.name() + )); + if kinds.len() > 1 { + failure::bail!("cannot mix `proc-macro` crate type with others"); + } + vec![LibKind::ProcMacro] + } + (_, Some(true), Some(true)) => { + failure::bail!("`lib.plugin` and `lib.proc-macro` cannot both be `true`") + } + (Some(kinds), _, _) => kinds.iter().map(|s| s.into()).collect(), + (None, Some(true), _) => vec![LibKind::Dylib], + (None, _, Some(true)) => vec![LibKind::ProcMacro], + (None, _, _) => vec![LibKind::Lib], + }; + + let mut target = Target::lib_target(&lib.name(), crate_types, path, edition); + configure(features, lib, &mut target)?; + Ok(Some(target)) +} + +fn clean_bins( + features: &Features, + toml_bins: Option<&Vec>, + package_root: &Path, + package_name: &str, + edition: Edition, + autodiscover: Option, + warnings: &mut Vec, + errors: &mut Vec, + has_lib: bool, +) -> CargoResult> { + let inferred = inferred_bins(package_root, package_name); + + let bins = toml_targets_and_inferred( + toml_bins, + &inferred, + package_root, + autodiscover, + edition, + warnings, + "binary", + "bin", + "autobins", + ); + + for bin in &bins { + validate_has_name(bin, "binary", "bin")?; + + let name = bin.name(); + + if let Some(crate_types) = bin.crate_types() { + if !crate_types.is_empty() { + errors.push(format!( + "the target `{}` is a binary and can't have any \ + crate-types set (currently \"{}\")", + name, + crate_types.join(", ") + )); + } + } + + if bin.proc_macro() == Some(true) { + errors.push(format!( + "the target `{}` is a binary and can't have `proc-macro` \ + set `true`", + name + )); + } + + if compiler::is_bad_artifact_name(&name) { + failure::bail!("the binary target name `{}` is forbidden", name) + } + } + + validate_unique_names(&bins, "binary")?; + + let mut result = Vec::new(); + for bin in &bins { + let path = target_path(bin, &inferred, "bin", package_root, edition, &mut |_| { + if let Some(legacy_path) = legacy_bin_path(package_root, &bin.name(), has_lib) { + warnings.push(format!( + "path `{}` was erroneously implicitly accepted for binary `{}`,\n\ + please set bin.path in Cargo.toml", + legacy_path.display(), + bin.name() + )); + Some(legacy_path) + } else { + None + } + }); + let path = match path { + Ok(path) => path, + Err(e) => failure::bail!("{}", e), + }; + + let mut target = + Target::bin_target(&bin.name(), path, bin.required_features.clone(), edition); + configure(features, bin, &mut target)?; + result.push(target); + } + return Ok(result); + + fn legacy_bin_path(package_root: &Path, name: &str, has_lib: bool) -> Option { + if !has_lib { + let path = package_root.join("src").join(format!("{}.rs", name)); + if path.exists() { + return Some(path); + } + } + let path = package_root.join("src").join("main.rs"); + if path.exists() { + return Some(path); + } + + let path = package_root.join("src").join("bin").join("main.rs"); + if path.exists() { + return Some(path); + } + None + 
} +} + +fn clean_examples( + features: &Features, + toml_examples: Option<&Vec>, + package_root: &Path, + edition: Edition, + autodiscover: Option, + warnings: &mut Vec, + errors: &mut Vec, +) -> CargoResult> { + let inferred = infer_from_directory(&package_root.join("examples")); + + let targets = clean_targets( + "example", + "example", + toml_examples, + &inferred, + package_root, + edition, + autodiscover, + warnings, + errors, + "autoexamples", + )?; + + let mut result = Vec::new(); + for (path, toml) in targets { + let crate_types = match toml.crate_types() { + Some(kinds) => kinds.iter().map(|s| s.into()).collect(), + None => Vec::new(), + }; + + let mut target = Target::example_target( + &toml.name(), + crate_types, + path, + toml.required_features.clone(), + edition, + ); + configure(features, &toml, &mut target)?; + result.push(target); + } + + Ok(result) +} + +fn clean_tests( + features: &Features, + toml_tests: Option<&Vec>, + package_root: &Path, + edition: Edition, + autodiscover: Option, + warnings: &mut Vec, + errors: &mut Vec, +) -> CargoResult> { + let inferred = infer_from_directory(&package_root.join("tests")); + + let targets = clean_targets( + "test", + "test", + toml_tests, + &inferred, + package_root, + edition, + autodiscover, + warnings, + errors, + "autotests", + )?; + + let mut result = Vec::new(); + for (path, toml) in targets { + let mut target = + Target::test_target(&toml.name(), path, toml.required_features.clone(), edition); + configure(features, &toml, &mut target)?; + result.push(target); + } + Ok(result) +} + +fn clean_benches( + features: &Features, + toml_benches: Option<&Vec>, + package_root: &Path, + edition: Edition, + autodiscover: Option, + warnings: &mut Vec, + errors: &mut Vec, +) -> CargoResult> { + let mut legacy_warnings = vec![]; + + let targets = { + let mut legacy_bench_path = |bench: &TomlTarget| { + let legacy_path = package_root.join("src").join("bench.rs"); + if !(bench.name() == "bench" && legacy_path.exists()) { + return None; + } + legacy_warnings.push(format!( + "path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\ + please set bench.path in Cargo.toml", + legacy_path.display(), + bench.name() + )); + Some(legacy_path) + }; + + let inferred = infer_from_directory(&package_root.join("benches")); + + clean_targets_with_legacy_path( + "benchmark", + "bench", + toml_benches, + &inferred, + package_root, + edition, + autodiscover, + warnings, + errors, + &mut legacy_bench_path, + "autobenches", + )? 
+ }; + + warnings.append(&mut legacy_warnings); + + let mut result = Vec::new(); + for (path, toml) in targets { + let mut target = + Target::bench_target(&toml.name(), path, toml.required_features.clone(), edition); + configure(features, &toml, &mut target)?; + result.push(target); + } + + Ok(result) +} + +fn clean_targets( + target_kind_human: &str, + target_kind: &str, + toml_targets: Option<&Vec>, + inferred: &[(String, PathBuf)], + package_root: &Path, + edition: Edition, + autodiscover: Option, + warnings: &mut Vec, + errors: &mut Vec, + autodiscover_flag_name: &str, +) -> CargoResult> { + clean_targets_with_legacy_path( + target_kind_human, + target_kind, + toml_targets, + inferred, + package_root, + edition, + autodiscover, + warnings, + errors, + &mut |_| None, + autodiscover_flag_name, + ) +} + +fn clean_targets_with_legacy_path( + target_kind_human: &str, + target_kind: &str, + toml_targets: Option<&Vec>, + inferred: &[(String, PathBuf)], + package_root: &Path, + edition: Edition, + autodiscover: Option, + warnings: &mut Vec, + errors: &mut Vec, + legacy_path: &mut dyn FnMut(&TomlTarget) -> Option, + autodiscover_flag_name: &str, +) -> CargoResult> { + let toml_targets = toml_targets_and_inferred( + toml_targets, + inferred, + package_root, + autodiscover, + edition, + warnings, + target_kind_human, + target_kind, + autodiscover_flag_name, + ); + + for target in &toml_targets { + validate_has_name(target, target_kind_human, target_kind)?; + } + + validate_unique_names(&toml_targets, target_kind)?; + let mut result = Vec::new(); + for target in toml_targets { + let path = target_path( + &target, + inferred, + target_kind, + package_root, + edition, + legacy_path, + ); + let path = match path { + Ok(path) => path, + Err(e) => { + errors.push(e); + continue; + } + }; + result.push((path, target)); + } + Ok(result) +} + +fn inferred_lib(package_root: &Path) -> Option { + let lib = package_root.join("src").join("lib.rs"); + if fs::metadata(&lib).is_ok() { + Some(lib) + } else { + None + } +} + +fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> { + let main = package_root.join("src").join("main.rs"); + let mut result = Vec::new(); + if main.exists() { + result.push((package_name.to_string(), main)); + } + result.extend(infer_from_directory(&package_root.join("src").join("bin"))); + + result +} + +fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> { + let entries = match fs::read_dir(directory) { + Err(_) => return Vec::new(), + Ok(dir) => dir, + }; + + entries + .filter_map(|e| e.ok()) + .filter(is_not_dotfile) + .filter_map(|d| infer_any(&d)) + .collect() +} + +fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> { + if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") { + infer_file(entry) + } else if entry.file_type().map(|t| t.is_dir()).ok() == Some(true) { + infer_subdirectory(entry) + } else { + None + } +} + +fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> { + let path = entry.path(); + path.file_stem() + .and_then(|p| p.to_str()) + .map(|p| (p.to_owned(), path.clone())) +} + +fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> { + let path = entry.path(); + let main = path.join("main.rs"); + let name = path.file_name().and_then(|n| n.to_str()); + match (name, main.exists()) { + (Some(name), true) => Some((name.to_owned(), main)), + _ => None, + } +} + +fn is_not_dotfile(entry: &DirEntry) -> bool { + entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false) +} + +fn 
toml_targets_and_inferred( + toml_targets: Option<&Vec>, + inferred: &[(String, PathBuf)], + package_root: &Path, + autodiscover: Option, + edition: Edition, + warnings: &mut Vec, + target_kind_human: &str, + target_kind: &str, + autodiscover_flag_name: &str, +) -> Vec { + let inferred_targets = inferred_to_toml_targets(inferred); + match toml_targets { + None => { + if let Some(false) = autodiscover { + vec![] + } else { + inferred_targets + } + } + Some(targets) => { + let mut targets = targets.clone(); + + let target_path = + |target: &TomlTarget| target.path.clone().map(|p| package_root.join(p.0)); + + let mut seen_names = HashSet::new(); + let mut seen_paths = HashSet::new(); + for target in targets.iter() { + seen_names.insert(target.name.clone()); + seen_paths.insert(target_path(target)); + } + + let mut rem_targets = vec![]; + for target in inferred_targets { + if !seen_names.contains(&target.name) && !seen_paths.contains(&target_path(&target)) + { + rem_targets.push(target); + } + } + + let autodiscover = match autodiscover { + Some(autodiscover) => autodiscover, + None => { + if edition == Edition::Edition2015 { + if !rem_targets.is_empty() { + let mut rem_targets_str = String::new(); + for t in rem_targets.iter() { + if let Some(p) = t.path.clone() { + rem_targets_str.push_str(&format!("* {}\n", p.0.display())) + } + } + warnings.push(format!( + "\ +An explicit [[{section}]] section is specified in Cargo.toml which currently +disables Cargo from automatically inferring other {target_kind_human} targets. +This inference behavior will change in the Rust 2018 edition and the following +files will be included as a {target_kind_human} target: + +{rem_targets_str} +This is likely to break cargo build or cargo test as these files may not be +ready to be compiled as a {target_kind_human} target today. You can future-proof yourself +and disable this warning by adding `{autodiscover_flag_name} = false` to your [package] +section. You may also move the files to a location where Cargo would not +automatically infer them to be a target, such as in subfolders. + +For more information on this warning you can consult +https://github.com/rust-lang/cargo/issues/5330", + section = target_kind, + target_kind_human = target_kind_human, + rem_targets_str = rem_targets_str, + autodiscover_flag_name = autodiscover_flag_name, + )); + }; + false + } else { + true + } + } + }; + + if autodiscover { + targets.append(&mut rem_targets); + } + + targets + } + } +} + +fn inferred_to_toml_targets(inferred: &[(String, PathBuf)]) -> Vec { + inferred + .iter() + .map(|&(ref name, ref path)| TomlTarget { + name: Some(name.clone()), + path: Some(PathValue(path.clone())), + ..TomlTarget::new() + }) + .collect() +} + +fn validate_has_name( + target: &TomlTarget, + target_kind_human: &str, + target_kind: &str, +) -> CargoResult<()> { + match target.name { + Some(ref name) => { + if name.trim().is_empty() { + failure::bail!("{} target names cannot be empty", target_kind_human) + } + } + None => failure::bail!( + "{} target {}.name is required", + target_kind_human, + target_kind + ), + } + + Ok(()) +} + +/// Will check a list of toml targets, and make sure the target names are unique within a vector. 
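+// A failed `HashSet` insert flags the first duplicate, so e.g. two `[[bin]]`
+// sections that are both named "main" are rejected with one error.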
+fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResult<()> { + let mut seen = HashSet::new(); + for name in targets.iter().map(|e| e.name()) { + if !seen.insert(name.clone()) { + failure::bail!( + "found duplicate {target_kind} name {name}, \ + but all {target_kind} targets must have a unique name", + target_kind = target_kind, + name = name + ); + } + } + Ok(()) +} + +fn configure(features: &Features, toml: &TomlTarget, target: &mut Target) -> CargoResult<()> { + let t2 = target.clone(); + target + .set_tested(toml.test.unwrap_or_else(|| t2.tested())) + .set_doc(toml.doc.unwrap_or_else(|| t2.documented())) + .set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested())) + .set_benched(toml.bench.unwrap_or_else(|| t2.benched())) + .set_harness(toml.harness.unwrap_or_else(|| t2.harness())) + .set_proc_macro(toml.proc_macro.unwrap_or_else(|| t2.proc_macro())) + .set_for_host(match (toml.plugin, toml.proc_macro()) { + (None, None) => t2.for_host(), + (Some(true), _) | (_, Some(true)) => true, + (Some(false), _) | (_, Some(false)) => false, + }); + if let Some(edition) = toml.edition.clone() { + features + .require(Feature::edition()) + .chain_err(|| "editions are unstable")?; + target.set_edition( + edition + .parse() + .chain_err(|| "failed to parse the `edition` key")?, + ); + } + Ok(()) +} + +fn target_path( + target: &TomlTarget, + inferred: &[(String, PathBuf)], + target_kind: &str, + package_root: &Path, + edition: Edition, + legacy_path: &mut dyn FnMut(&TomlTarget) -> Option, +) -> Result { + if let Some(ref path) = target.path { + // Should we verify that this path exists here? + return Ok(package_root.join(&path.0)); + } + let name = target.name(); + + let mut matching = inferred + .iter() + .filter(|&&(ref n, _)| n == &name) + .map(|&(_, ref p)| p.clone()); + + let first = matching.next(); + let second = matching.next(); + match (first, second) { + (Some(path), None) => Ok(path), + (None, None) | (Some(_), Some(_)) => { + if edition == Edition::Edition2015 { + if let Some(path) = legacy_path(target) { + return Ok(path); + } + } + Err(format!( + "can't find `{name}` {target_kind}, specify {target_kind}.path", + name = name, + target_kind = target_kind + )) + } + (None, Some(_)) => unreachable!(), + } +} diff --git a/src/cargo/util/vcs.rs b/src/cargo/util/vcs.rs index d171ec5eb3c..01a8c1fa749 100644 --- a/src/cargo/util/vcs.rs +++ b/src/cargo/util/vcs.rs @@ -1,30 +1,105 @@ +use std::fs::create_dir; use std::path::Path; use git2; -use util::{CargoResult, process}; +use crate::util::{process, CargoResult}; + +// Check if we are in an existing repo. We define that to be true if either: +// +// 1. We are in a git repo and the path to the new package is not an ignored +// path in that repo. +// 2. We are in an HG repo. +pub fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool { + fn in_git_repo(path: &Path, cwd: &Path) -> bool { + if let Ok(repo) = GitRepo::discover(path, cwd) { + // Don't check if the working directory itself is ignored. 
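+            // The repo root itself may match an ignore rule (e.g. a `target/`
+            // glob), so the ignore check is skipped when `path` is the
+            // workdir.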
+            if repo.workdir().map_or(false, |workdir| workdir == path) {
+                true
+            } else {
+                !repo.is_path_ignored(path).unwrap_or(false)
+            }
+        } else {
+            false
+        }
+    }
+
+    in_git_repo(path, cwd) || HgRepo::discover(path, cwd).is_ok()
+}
 
 pub struct HgRepo;
 pub struct GitRepo;
+pub struct PijulRepo;
+pub struct FossilRepo;
 
 impl GitRepo {
-    pub fn init(path: &Path) -> CargoResult<GitRepo> {
-        try!(git2::Repository::init(path));
-        return Ok(GitRepo)
+    pub fn init(path: &Path, _: &Path) -> CargoResult<GitRepo> {
+        git2::Repository::init(path)?;
+        Ok(GitRepo)
     }
-    pub fn discover(path: &Path) -> Result<git2::Repository, git2::Error> {
+    pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository, git2::Error> {
         git2::Repository::discover(path)
     }
 }
 
 impl HgRepo {
-    pub fn init(path: &Path) -> CargoResult<HgRepo> {
-        try!(try!(process("hg")).arg("init").arg(path).exec());
-        return Ok(HgRepo)
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+        process("hg").cwd(cwd).arg("init").arg(path).exec()?;
+        Ok(HgRepo)
     }
-    pub fn discover(path: &Path) -> CargoResult<HgRepo> {
-        try!(try!(process("hg")).arg("root").cwd(path).exec_with_output());
-        return Ok(HgRepo)
+    pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> {
+        process("hg")
+            .cwd(cwd)
+            .arg("--cwd")
+            .arg(path)
+            .arg("root")
+            .exec_with_output()?;
+        Ok(HgRepo)
+    }
+}
+
+impl PijulRepo {
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<PijulRepo> {
+        process("pijul").cwd(cwd).arg("init").arg(path).exec()?;
+        Ok(PijulRepo)
     }
 }
 
+impl FossilRepo {
+    pub fn init(path: &Path, cwd: &Path) -> CargoResult<FossilRepo> {
+        // fossil doesn't create the directory so we'll do that first
+        create_dir(path)?;
+
+        // set up the paths we'll use
+        let db_fname = ".fossil";
+        let mut db_path = path.to_owned();
+        db_path.push(db_fname);
+
+        // then create the fossil DB in that location
+        process("fossil")
+            .cwd(cwd)
+            .arg("init")
+            .arg(&db_path)
+            .exec()?;
+
+        // open it in that new directory
+        process("fossil")
+            .cwd(&path)
+            .arg("open")
+            .arg(db_fname)
+            .exec()?;
+
+        // set `target` as ignorable and cleanable; the settings commands
+        // must actually be executed, or the configuration is silently lost
+        process("fossil")
+            .cwd(cwd)
+            .arg("settings")
+            .arg("ignore-glob")
+            .arg("target")
+            .exec()?;
+        process("fossil")
+            .cwd(cwd)
+            .arg("settings")
+            .arg("clean-glob")
+            .arg("target")
+            .exec()?;
+        Ok(FossilRepo)
+    }
+}
diff --git a/src/cargo/util/workspace.rs b/src/cargo/util/workspace.rs
new file mode 100644
index 00000000000..73ee04499b8
--- /dev/null
+++ b/src/cargo/util/workspace.rs
@@ -0,0 +1,75 @@
+use crate::core::{Target, Workspace};
+use crate::ops::CompileOptions;
+use crate::util::CargoResult;
+
+use std::fmt::Write;
+
+fn get_available_targets<'a>(
+    filter_fn: fn(&Target) -> bool,
+    ws: &'a Workspace<'_>,
+    options: &'a CompileOptions<'_>,
+) -> CargoResult<Vec<&'a Target>> {
+    let packages = options.spec.get_packages(ws)?;
+
+    let mut targets: Vec<_> = packages
+        .into_iter()
+        .flat_map(|pkg| {
+            pkg.manifest()
+                .targets()
+                .iter()
+                .filter(|target| filter_fn(target))
+        })
+        .collect();
+
+    targets.sort();
+
+    Ok(targets)
+}
+
+fn print_available(
+    filter_fn: fn(&Target) -> bool,
+    ws: &Workspace<'_>,
+    options: &CompileOptions<'_>,
+    option_name: &str,
+    plural_name: &str,
+) -> CargoResult<()> {
+    let targets = get_available_targets(filter_fn, ws, options)?;
+
+    let mut output = String::new();
+    writeln!(output, "\"{}\" takes one argument.", option_name)?;
+
+    if targets.is_empty() {
+        writeln!(output, "No {} available.", plural_name)?;
+    } else {
+        writeln!(output, "Available {}:", plural_name)?;
+        for target in targets {
+            writeln!(output, "    {}", target.name())?;
+        }
+    }
+    Err(failure::err_msg(output))
+}
+
+pub fn print_available_examples(
+    ws:
&Workspace<'_>, + options: &CompileOptions<'_>, +) -> CargoResult<()> { + print_available(Target::is_example, ws, options, "--example", "examples") +} + +pub fn print_available_binaries( + ws: &Workspace<'_>, + options: &CompileOptions<'_>, +) -> CargoResult<()> { + print_available(Target::is_bin, ws, options, "--bin", "binaries") +} + +pub fn print_available_benches( + ws: &Workspace<'_>, + options: &CompileOptions<'_>, +) -> CargoResult<()> { + print_available(Target::is_bench, ws, options, "--bench", "benches") +} + +pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions<'_>) -> CargoResult<()> { + print_available(Target::is_test, ws, options, "--test", "tests") +} diff --git a/src/crates-io/lib.rs b/src/crates-io/lib.rs deleted file mode 100644 index ea19883be6b..00000000000 --- a/src/crates-io/lib.rs +++ /dev/null @@ -1,271 +0,0 @@ -extern crate curl; -extern crate rustc_serialize; - -use std::collections::HashMap; -use std::fmt; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::io::{self, Cursor}; -use std::path::Path; -use std::result; - -use curl::http; -use curl::http::handle::Method::{Put, Get, Delete}; -use curl::http::handle::{Method, Request}; -use rustc_serialize::json; - -pub struct Registry { - host: String, - token: Option, - handle: http::Handle, -} - -pub type Result = result::Result; - -#[derive(PartialEq, Clone, Copy)] -pub enum Auth { - Authorized, - Unauthorized -} - -pub enum Error { - Curl(curl::ErrCode), - NotOkResponse(http::Response), - NonUtf8Body, - Api(Vec), - Unauthorized, - TokenMissing, - Io(io::Error), - NotFound, -} - -#[derive(RustcDecodable)] -pub struct Crate { - pub name: String, - pub description: Option, - pub max_version: String -} - -#[derive(RustcEncodable)] -pub struct NewCrate { - pub name: String, - pub vers: String, - pub deps: Vec, - pub features: HashMap>, - pub authors: Vec, - pub description: Option, - pub documentation: Option, - pub homepage: Option, - pub readme: Option, - pub keywords: Vec, - pub license: Option, - pub license_file: Option, - pub repository: Option, -} - -#[derive(RustcEncodable)] -pub struct NewCrateDependency { - pub optional: bool, - pub default_features: bool, - pub name: String, - pub features: Vec, - pub version_req: String, - pub target: Option, - pub kind: String, -} - -#[derive(RustcDecodable)] -pub struct User { - pub id: u32, - pub login: String, - pub avatar: Option, - pub email: Option, - pub name: Option, -} - -#[derive(RustcDecodable)] struct R { ok: bool } -#[derive(RustcDecodable)] struct ApiErrorList { errors: Vec } -#[derive(RustcDecodable)] struct ApiError { detail: String } -#[derive(RustcEncodable)] struct OwnersReq<'a> { users: &'a [&'a str] } -#[derive(RustcDecodable)] struct Users { users: Vec } -#[derive(RustcDecodable)] struct Crates { crates: Vec } - -impl Registry { - pub fn new(host: String, token: Option) -> Registry { - Registry::new_handle(host, token, http::Handle::new()) - } - - pub fn new_handle(host: String, token: Option, - handle: http::Handle) -> Registry { - Registry { - host: host, - token: token, - handle: handle, - } - } - - pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { - let body = json::encode(&OwnersReq { users: owners }).unwrap(); - let body = try!(self.put(format!("/crates/{}/owners", krate), - body.as_bytes())); - assert!(json::decode::(&body).unwrap().ok); - Ok(()) - } - - pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { - let body = json::encode(&OwnersReq { users: owners 
}).unwrap(); - let body = try!(self.delete(format!("/crates/{}/owners", krate), - Some(body.as_bytes()))); - assert!(json::decode::(&body).unwrap().ok); - Ok(()) - } - - pub fn list_owners(&mut self, krate: &str) -> Result> { - let body = try!(self.get(format!("/crates/{}/owners", krate))); - Ok(json::decode::(&body).unwrap().users) - } - - pub fn publish(&mut self, krate: &NewCrate, tarball: &Path) -> Result<()> { - let json = json::encode(krate).unwrap(); - // Prepare the body. The format of the upload request is: - // - // - // (metadata for the package) - // - // - let stat = try!(fs::metadata(tarball).map_err(Error::Io)); - let header = { - let mut w = Vec::new(); - w.extend([ - (json.len() >> 0) as u8, - (json.len() >> 8) as u8, - (json.len() >> 16) as u8, - (json.len() >> 24) as u8, - ].iter().map(|x| *x)); - w.extend(json.as_bytes().iter().map(|x| *x)); - w.extend([ - (stat.len() >> 0) as u8, - (stat.len() >> 8) as u8, - (stat.len() >> 16) as u8, - (stat.len() >> 24) as u8, - ].iter().map(|x| *x)); - w - }; - let tarball = try!(File::open(tarball).map_err(Error::Io)); - let size = stat.len() as usize + header.len(); - let mut body = Cursor::new(header).chain(tarball); - - let url = format!("{}/api/v1/crates/new", self.host); - - let token = match self.token.as_ref() { - Some(s) => s, - None => return Err(Error::TokenMissing), - }; - let request = self.handle.put(url, &mut body) - .content_length(size) - .header("Accept", "application/json") - .header("Authorization", &token); - let response = handle(request.exec()); - let _body = try!(response); - Ok(()) - } - - pub fn search(&mut self, query: &str) -> Result> { - let body = try!(self.req(format!("/crates?q={}", query), None, Get, - Auth::Unauthorized)); - - Ok(json::decode::(&body).unwrap().crates) - } - - pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> { - let body = try!(self.delete(format!("/crates/{}/{}/yank", krate, version), - None)); - assert!(json::decode::(&body).unwrap().ok); - Ok(()) - } - - pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> { - let body = try!(self.put(format!("/crates/{}/{}/unyank", krate, version), - &[])); - assert!(json::decode::(&body).unwrap().ok); - Ok(()) - } - - fn put(&mut self, path: String, b: &[u8]) -> Result { - self.req(path, Some(b), Put, Auth::Authorized) - } - - fn get(&mut self, path: String) -> Result { - self.req(path, None, Get, Auth::Authorized) - } - - fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result { - self.req(path, b, Delete, Auth::Authorized) - } - - fn req(&mut self, path: String, body: Option<&[u8]>, - method: Method, authorized: Auth) -> Result { - let mut req = Request::new(&mut self.handle, method) - .uri(format!("{}/api/v1{}", self.host, path)) - .header("Accept", "application/json") - .content_type("application/json"); - - if authorized == Auth::Authorized { - let token = match self.token.as_ref() { - Some(s) => s, - None => return Err(Error::TokenMissing), - }; - req = req.header("Authorization", &token); - } - match body { - Some(b) => req = req.body(b), - None => {} - } - handle(req.exec()) - } -} - -fn handle(response: result::Result) - -> Result { - let response = try!(response.map_err(Error::Curl)); - match response.get_code() { - 0 => {} // file upload url sometimes - 200 => {} - 403 => return Err(Error::Unauthorized), - 404 => return Err(Error::NotFound), - _ => return Err(Error::NotOkResponse(response)) - } - - let body = match String::from_utf8(response.move_body()) { - Ok(body) => body, - Err(..) 
=> return Err(Error::NonUtf8Body), - }; - match json::decode::(&body) { - Ok(errors) => { - return Err(Error::Api(errors.errors.into_iter().map(|s| s.detail) - .collect())) - } - Err(..) => {} - } - Ok(body) -} - -impl fmt::Display for Error { - #[allow(deprecated)] // connect => join in 1.3 - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::NonUtf8Body => write!(f, "response body was not utf-8"), - Error::Curl(ref err) => write!(f, "http error: {}", err), - Error::NotOkResponse(ref resp) => { - write!(f, "failed to get a 200 OK response: {}", resp) - } - Error::Api(ref errs) => { - write!(f, "api errors: {}", errs.connect(", ")) - } - Error::Unauthorized => write!(f, "unauthorized API access"), - Error::TokenMissing => write!(f, "no upload token found, please run `cargo login`"), - Error::Io(ref e) => write!(f, "io error: {}", e), - Error::NotFound => write!(f, "cannot find crate"), - } - } -} diff --git a/src/doc/.gitignore b/src/doc/.gitignore new file mode 100644 index 00000000000..3155cd2e4d8 --- /dev/null +++ b/src/doc/.gitignore @@ -0,0 +1,2 @@ +# Ignore built book +book/ diff --git a/src/doc/CNAME b/src/doc/CNAME deleted file mode 100644 index b68cc5511a4..00000000000 --- a/src/doc/CNAME +++ /dev/null @@ -1 +0,0 @@ -doc.crates.io diff --git a/src/doc/Makefile b/src/doc/Makefile new file mode 100644 index 00000000000..505532d2be8 --- /dev/null +++ b/src/doc/Makefile @@ -0,0 +1,28 @@ +# This Makefile is used to build the Cargo man pages. +# +# The source for the man pages are located in src/doc/man in Asciidoctor +# format. See https://asciidoctor.org/ for more information. +# +# Just run `make` and it will generate the man pages in src/etc/man and the +# HTML pages in src/doc/man/generated. +# +# There are some Asciidoctor extensions, see the file `asciidoc-extensions.rb` +# for the documentation. + +MAN_SOURCE = $(sort $(wildcard man/cargo*.adoc)) +COMMANDS = $(notdir $(MAN_SOURCE)) +HTML = $(patsubst %.adoc,man/generated/%.html,$(COMMANDS)) +MAN_LOCATION = ../etc/man +MAN = $(patsubst %.adoc,$(MAN_LOCATION)/%.1,$(COMMANDS)) +ASCIIDOCOPTS = -r ./asciidoc-extension.rb +OTHER_DEPS = asciidoc-extension.rb $(filter-out $(MAN_SOURCE),$(sort $(wildcard man/*.adoc))) + +all: commands-html man +commands-html: $(HTML) +man: $(MAN) + +$(HTML): man/generated/%.html : man/%.adoc asciidoc-extension.rb $(OTHER_DEPS) + asciidoctor $(ASCIIDOCOPTS) -s $< -o $@ + +$(MAN): $(MAN_LOCATION)/%.1 : man/%.adoc $(OTHER_DEPS) + asciidoctor $(ASCIIDOCOPTS) -b manpage $< -o $@ diff --git a/src/doc/README.md b/src/doc/README.md new file mode 100644 index 00000000000..d4d334658bc --- /dev/null +++ b/src/doc/README.md @@ -0,0 +1,47 @@ +# The Cargo Book + + +### Requirements + +Building the book requires [mdBook]. To get it: + +[mdBook]: https://github.com/rust-lang-nursery/mdBook + +```console +$ cargo install mdbook +``` + +### Building + +To build the book: + +```console +$ mdbook build +``` + +The output will be in the `book` subdirectory. To check it out, open it in +your web browser. 
+ +_Firefox:_ +```console +$ firefox book/index.html # Linux +$ open -a "Firefox" book/index.html # OS X +$ Start-Process "firefox.exe" .\book\index.html # Windows (PowerShell) +$ start firefox.exe .\book\index.html # Windows (Cmd) +``` + +_Chrome:_ +```console +$ google-chrome book/index.html # Linux +$ open -a "Google Chrome" book/index.html # OS X +$ Start-Process "chrome.exe" .\book\index.html # Windows (PowerShell) +$ start chrome.exe .\book\index.html # Windows (Cmd) +``` + + +## Contributing + +Given that the book is still in a draft state, we'd love your help! Please feel free to open +issues about anything, and send in PRs for things you'd like to fix or change. If your change is +large, please open an issue first, so we can make sure that it's something we'd accept before you +go through the work of getting a PR together. diff --git a/src/doc/asciidoc-extension.rb b/src/doc/asciidoc-extension.rb new file mode 100644 index 00000000000..87f4f2a2731 --- /dev/null +++ b/src/doc/asciidoc-extension.rb @@ -0,0 +1,110 @@ +require 'asciidoctor/extensions' unless RUBY_ENGINE == 'opal' + +include Asciidoctor + +# An inline macro that generates links to related man pages. +# +# Usage +# +# man:gittutorial[7] +# +class ManInlineMacro < Extensions::InlineMacroProcessor + use_dsl + + named :man + name_positional_attributes 'volnum' + + def process parent, target, attrs + manname = target + suffix = if (volnum = attrs['volnum']) + "(#{volnum})" + else + nil + end + text = %(#{manname}#{suffix}) + if parent.document.basebackend? 'html' + parent.document.register :links, target + if manname == 'rustc' + html_target = 'https://doc.rust-lang.org/rustc/index.html' + elsif manname == 'rustdoc' + html_target = 'https://doc.rust-lang.org/rustdoc/index.html' + elsif manname == 'cargo' + html_target = 'index.html' + else + html_target = %(#{manname}.html) + end + %(#{(create_anchor parent, text, type: :link, target: html_target).render}) + elsif parent.document.backend == 'manpage' + %(\x1b\\fB#{manname}\x1b\\fP#{suffix}) + else + text + end + end +end + +# Creates a link to something in the cargo documentation. +# +# For HTML this creates a relative link. For the man page it gives a direct +# link to doc.rust-lang.org. +# +# Usage +# +# linkcargo:reference/manifest.html[the manifest] +# +class LinkCargoInlineMacro < Extensions::InlineMacroProcessor + use_dsl + + named :linkcargo + name_positional_attributes 'text' + + def process parent, target, attrs + text = attrs['text'] + if parent.document.basebackend? 'html' + target = %(../#{target}) + parent.document.register :links, target + %(#{(create_anchor parent, text, type: :link, target: target).render}) + elsif parent.document.backend == 'manpage' + target = %(https://doc.rust-lang.org/cargo/#{target}) + %(#{(create_anchor parent, text, type: :link, target: target).render}) + else + %(#{text} <#{target}>) + end + end +end + +# Backticks in the manpage renderer use the CR font (courier), but in most +# cases in a terminal this doesn't look any different. Instead, use bold which +# should follow man page conventions better. +class MonoPostprocessor < Extensions::Postprocessor + def process document, output + if document.basebackend? 'manpage' + output = output.gsub(/\\f\(CR/, '\\fB') + end + output + end +end + +# General utility for converting text. 
Example: +# +# convert:lowercase[{somevar}] +class ConvertInlineMacro < Extensions::InlineMacroProcessor + use_dsl + + named :convert + name_positional_attributes 'text' + + def process parent, target, attrs + text = attrs['text'] + case target + when 'lowercase' + text.downcase + end + end +end + +Extensions.register :uri_schemes do + inline_macro ManInlineMacro + inline_macro LinkCargoInlineMacro + inline_macro ConvertInlineMacro + postprocessor MonoPostprocessor +end diff --git a/src/doc/book.toml b/src/doc/book.toml new file mode 100644 index 00000000000..8ba2656ec44 --- /dev/null +++ b/src/doc/book.toml @@ -0,0 +1,6 @@ +[book] +title = "The Cargo Book" +author = "Alex Crichton, Steve Klabnik and Carol Nichols, with Contributions from the Rust Community" + +[output.html] +git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/src" diff --git a/src/doc/config.md b/src/doc/config.md deleted file mode 100644 index ff7b11d6ba7..00000000000 --- a/src/doc/config.md +++ /dev/null @@ -1,96 +0,0 @@ -% Configuration - Cargo Documentation - -This document will explain how cargo's configuration system works, as well as -available keys or configuration. For configuration of a project through its -manifest, see the [manifest format](manifest.html). - -# Hierarchical structure - -Cargo allows to have local configuration for a particular project or global -configuration (like git). Cargo also extends this ability to a hierarchical -strategy. If, for example, cargo were invoked in `/home/foo/bar/baz`, then the -following configuration files would be probed for: - -* `/home/foo/bar/baz/.cargo/config` -* `/home/foo/bar/.cargo/config` -* `/home/foo/.cargo/config` -* `/home/.cargo/config` -* `/.cargo/config` - -With this structure you can specify local configuration per-project, and even -possibly check it into version control. You can also specify personal default -with a configuration file in your home directory. - -# Configuration Format - -All configuration is currently in the [TOML format][toml] (like the manifest), -with simple key-value pairs inside of sections (tables) which all get merged -together. - -[toml]: https://github.com/toml-lang/toml - -# Configuration keys - -All of the following keys are optional, and their defaults are listed as their -value unless otherwise noted. - -Key values that specify a tool may be given as an absolute path, a relative path -or as a pathless tool name. Absolute paths and pathless tool names are used as -given. Relative paths are resolved relative to the parent directory of the -`.cargo` directory of the config file that the value resides within. - -```toml -# An array of paths to local repositories which are to be used as overrides for -# dependencies. For more information see the Cargo Guide. -paths = ["/path/to/override"] - -[cargo-new] -# This is your name/email to place in the `authors` section of a new Cargo.toml -# that is generated. If not present, then `git` will be probed, and if that is -# not present then `$USER` and `$EMAIL` will be used. -name = "..." -email = "..." - -# By default `cargo new` will initialize a new git repository. This key can be -# set to `none` to disable this behavior. -vcs = "none" - -# For the following sections, $triple refers to any valid target triple, not the -# literal string "$triple", and it will apply whenever that target triple is -# being compiled to. 
-[target] -# For cargo builds which do not mention --target, these are the ar/linker tools -# which are passed to rustc to use (via `-C ar=` and `-C linker=`). By default -# these flags are not passed to the compiler. -ar = ".." -linker = ".." - -[target.$triple] -# Similar to the above ar/linker tool configuration, but this only applies to -# when the `$triple` is being compiled for. -ar = ".." -linker = ".." - -# Configuration keys related to the registry -[registry] -index = "..." # URL of the registry index (defaults to the central repository) -token = "..." # Access token (found on the central repo's website) - -[http] -proxy = "..." # HTTP proxy to use for HTTP requests (defaults to none) -timeout = 60000 # Timeout for each HTTP request, in milliseconds - -[build] -jobs = 1 # number of jobs to run by default (default to # cpus) -rustc = "rustc" # the rust compiler tool -rustdoc = "rustdoc" # the doc generator tool -target-dir = "target" # path of where to place all generated artifacts -``` - -# Environment Variables - -Cargo recognizes a few global [environment variables][env] to configure itself. -Settings specified via config files take precedence over those specified via -environment variables. - -[env]: environment-variables.html diff --git a/src/doc/crates-io.md b/src/doc/crates-io.md deleted file mode 100644 index 4a8f5c433f7..00000000000 --- a/src/doc/crates-io.md +++ /dev/null @@ -1,313 +0,0 @@ -% Cargo and crates.io - -In addition to using dependencies from git repositories (as mentioned in -[the guide](guide.html)) Cargo can also publish to and download from the -[crates.io][crates-io] central repository. This site serves as a location to -discover and download packages, and `cargo` is configured to use it by default -to find requested packages. - -The guide will explain how crates can use crates.io through the `cargo` command -line tool. - -[crates-io]: https://crates.io/ - -# Using crates.io-based crates - -The method of specifying a dependency on a crate from crates.io is slightly -different than the method of specifying a dependency on a git repository. The -syntax for doing so is: - -```toml -[dependencies] -glob = "0.0.3" -``` - -With this format, adding new dependencies should just add a new line, you don't -need to add `[dependencies]` for each dependency listed, for example: - -```toml -[dependencies] -glob = "0.0.3" -num = "0.0.4" -``` - -The string value for each key in this table is a [semver][semver] version -requirement. - -[semver]: http://doc.rust-lang.org/semver/semver/#requirements - -**Caret requirements** allow SemVer compatible updates to a specified version. - -`^1.2.3` is an example of a caret requirement. - -When considering ‘compatible’ versions, `0.1` and `0.2` are not considered -compatible, but `1.0` and `1.1` are for example. If no operator is specified, -this is the default requirement (e.g. `1.3` is the same as `^1.3`). - -`0.0.x` is not considered compatible with any other version. Missing minor and -patch versions are desugared to `0` but allow flexibility for that value. - -```notrust -^1.2.3 := >=1.2.3 <2.0.0 -^0.2.3 := >=0.2.3 <0.3.0 -^0.0.3 := >=0.0.3 <0.0.4 -^0.0 := >=0.0.0 <0.1.0 -^0 := >=0.0.0 <1.0.0 -``` - -**Tilde requirements** specify a minimal version with some ability to update. - -`~1.2.3` is an example of a tilde requirement. - -```notrust -~1.2.3 := >=1.2.3 <1.3.0 -~1.2 := >=1.2.0 <1.3.0 -~1 := >=1.0.0 <2.0.0 -``` - -**Wildcard requirements** allow for any version where the wildcard is positioned. 
- -`*`, `1.*` and `1.2.*` are examples of wildcard requirements. - -```notrust -* := >=0.0.0 -1.* := >=1.0.0 <2.0.0 -1.2.* := >=1.2.0 <1.3.0 -``` - -**Inequality requirements** allow manually specifying a version range or an -exact version to depend on. - -Here are some examples of wildcard requirements: - -```notrust ->= 1.2.0 -> 1 -< 2 -= 1.2.3 -``` - -Multiple version requirements can also be separated with a comma, e.g. `>= 1.2, -< 1.5`. - -# Pre-1.0 versions - -While SemVer says that there is no compatibility before 1.0.0, many programmers -treat a `0.x.y` release in the same way as a `1.x.y` release: that is, `y` is -incremented for bugfixes, and `x` is incremented for new features. - -As such, Cargo considers a `0.x.y` and `0.x.z` version, where `z > y`, to be -compatible. - -# Publishing crates - -Ok, now that we've got a crate which is using dependencies from crates.io, -let's publish it! Publishing a crate is when a specific version is uploaded to -crates.io. - -Take care when publishing a crate, because a publish is **permanent**. The -version can never be overwritten, and the code cannot be deleted. There is no -limit to the number of versions which can be published, however. - -## Acquiring an API token - -First thing's first, you'll need an account on [crates.io][crates-io] to acquire -an API token. To do so, [visit the home page][crates-io] and log in via a GitHub -account (required for now). After this, visit your [Account -Settings](https://crates.io/me) page and run the `cargo login` command -specified. - -```notrust -$ cargo login abcdefghijklmnopqrstuvwxyz012345 -``` - -This command will inform Cargo of your API token and store it locally in your -`~/.cargo/config`. Note that this token is a **secret** and should not be shared -with anyone else. If it leaks for any reason, you should regenerate it -immediately. - -## Packaging a crate - -The next step is to package up your crate into a format that can be uploaded to -crates.io. For this we'll use the `cargo package` subcommand. This will take -our entire crate and package it all up into a `*.crate` file in the -`target/package` directory. - -```notrust -$ cargo package -``` - -As an added bonus, the `*.crate` will be verified independently of the current -source tree. After the `*.crate` is created, it's unpacked into -`target/package` and then built from scratch to ensure that all necessary files -are there for the build to succeed. This behavior can be disabled with the -`--no-verify` flag. - -Now's a good time to take a look at the `*.crate` file to make sure you didn't -accidentally package up that 2GB video asset. Cargo will automatically ignore -files ignored by your version control system when packaging, but if you want to -specify an extra set of files to ignore you can use the `exclude` key in the -manifest: - -```toml -[package] -# ... -exclude = [ - "public/assets/*", - "videos/*", -] -``` - -The syntax of each element in this array is what -[rust-lang/glob](https://github.com/rust-lang/glob) accepts. If you'd rather -roll with a whitelist instead of a blacklist, Cargo also supports an `include` -key: - -```toml -[package] -# ... -include = [ - "**/*.rs", - "Cargo.toml", -] -``` - -## Uploading the crate - -Now that we've got a `*.crate` file ready to go, it can be uploaded to -crates.io with the `cargo publish` command. And that's it, you've now published -your first crate! 
- -```notrust -$ cargo publish -``` - -If you'd like to skip the `cargo package` step, the `cargo publish` subcommand -will automatically package up the local crate if a copy isn't found already. - -Be sure to check out the [metadata you can -specify](manifest.html#package-metadata) to ensure your crate can be discovered -more easily! - -## Restrictions - -There are a few restrictions when publishing a crate in the registry: - -* Once a version is uploaded, it can never be overwritten. To upload a new copy - of a crate you must upload a new version. -* Crate names are allocated on a first-come-first-serve basis. Once a crate name - is taken it cannot be used for another crate. -* There is currently a 10MB upload size limit on `*.crate` files. - -# Managing a crates.io-based crate - -Management of crates is primarily done through the command line `cargo` tool -rather than the crates.io web interface. For this, there are a few subcommands -to manage a crate. - -## `cargo yank` - -Occasions may arise where you publish a version of a crate that actually ends up -being broken for one reason or another (syntax error, forgot to include a file, -etc). For situations such as this, Cargo supports a "yank" of a version of a -crate. - -```notrust -$ cargo yank --vers 1.0.1 -$ cargo yank --vers 1.0.1 --undo -``` - -A yank **does not** delete any code. This feature is not intended for deleting -accidentally uploaded secrets, for example. If that happens, you must reset -those secrets immediately. - -The semantics of a yanked version are that no new dependencies can be created -against that version, but all existing dependencies continue to work. One of the -major goals of crates.io is to act as a permanent archive of crates that does -not change over time, and allowing deletion of a version would go against this -goal. Essentially a yank means that all projects with a `Cargo.lock` will not -break, while any future `Cargo.lock` files generated will not list the yanked -version. - -## `cargo owner` - -A crate is often developed by more than one person, or the primary maintainer -may change over time! The owner of a crate is the only person allowed to publish -new versions of the crate, but an owner may designate additional owners. - -```notrust -$ cargo owner --add my-buddy -$ cargo owner --remove my-buddy -$ cargo owner --add github:rust-lang:owners -$ cargo owner --remove github:rust-lang:owners -``` - -The owner IDs given to these commands must be GitHub user names or Github teams. - -If a user name is given to `--add`, that user becomes a "named" owner, with -full rights to the crate. In addition to being able to publish or yank versions -of the crate, they have the ability to add or remove owners, *including* the -owner that made *them* an owner. Needless to say, you shouldn't make people you -don't fully trust into a named owner. In order to become a named owner, a user -must have logged into crates.io previously. - -If a team name is given to `--add`, that team becomes a "team" owner, with -restricted right to the crate. While they have permission to publish or yank -versions of the crate, they *do not* have the ability to add or remove owners. -In addition to being more convenient for managing groups of owners, teams are -just a bit more secure against owners becoming malicious. - -The syntax for teams is currently `github:org:team` (see examples above). -In order to add a team as an owner one must be a member of that team. No -such restriction applies to removing a team as an owner. 
- -## Github Permissions - -Team membership is not something Github provides simple public access to, and it -is likely for you to encounter the following message when working with them: - -> It looks like you don't have permission to query a necessary property from -Github to complete this request. You may need to re-authenticate on crates.io -to grant permission to read github org memberships. Just go to -https://crates.io/login - -This is basically a catch-all for "you tried to query a team, and one of the -five levels of membership access control denied this". That is not an -exaggeration. Github's support for team access control is Enterprise Grade. - -The most likely cause of this is simply that you last logged in before this -feature was added. We originally requested *no* permissions from Github when -authenticating users, because we didn't actually ever use the user's token for -anything other than logging them in. However to query team membership on your -behalf, we now require -[the `read:org` scope](https://developer.github.com/v3/oauth/#scopes). - -You are free to deny us this scope, and everything that worked before teams -were introduced will keep working. However you will never be able to add a team -as an owner, or publish a crate as a team owner. If you ever attempt to do this, -you will get the error above. You may also see this error if you ever try to -publish a crate that you don't own at all, but otherwise happens to have a team. - -If you ever change your mind, or just aren't sure if crates.io has sufficient -permission, you can always go to https://crates.io/login, which will prompt you -for permission if crates.io doesn't have all the scopes it would like to. - -An additional barrier to querying github is that the organization may be -actively denying third party access. To check this, you can go to: - - https://github.com/organizations/:org/settings/oauth_application_policy - -where `:org` is the name of the organization (e.g. rust-lang). You may see -something like: - -![Organization Access Control](images/org-level-acl.png) - -Where you may choose to explicitly remove crates.io from your organization's -blacklist, or simply press the "Remove Restrictions" button to allow all third -party applications to access this data. - -Alternatively, when crates.io requested the `read:org` scope, you could have -explicitly whitelisted crates.io querying the org in question by pressing -the "Grant Access" button next to its name: - -![Authentication Access Control](images/auth-level-acl.png) diff --git a/src/doc/environment-variables.md b/src/doc/environment-variables.md deleted file mode 100644 index 83ff8c8c2a8..00000000000 --- a/src/doc/environment-variables.md +++ /dev/null @@ -1,67 +0,0 @@ -% Environment Variables - -Cargo sets a number of environment variables which your code can detect. To get -the value of any of these variables in a Rust program, do this: - -``` -let version = env!("CARGO_PKG_VERSION") -``` - -`version` will now contain the value of `CARGO_PKG_VERSION`. - -Here are a list of the variables Cargo sets, organized by when it sets them: - -# Environment variables Cargo reads - -* `CARGO_HOME` - Cargo maintains a local cache of the registry index and of git - checkouts of crates. By default these are stored under `$HOME/.cargo`, but - this variable overrides the location of this directory. -* `CARGO_PROFILE` - If this is set to a positive integer *N*, Cargo will record - timing data as it runs. 
When it exits, it will print this data as a profile - *N* levels deep. -* `CARGO_TARGET_DIR` - Location of where to place all generated artifacts, - relative to the current working directory. -* `RUSTC` - Instead of running `rustc`, Cargo will execute this specified - compiler instead. -* `RUSTDOC` - Instead of running `rustdoc`, Cargo will execute this specified - `rustdoc` instance instead. - -# Environment variables Cargo sets for build scripts - -* `CARGO_MANIFEST_DIR` - The directory containing the manifest for the package - being built (the package containing the build - script). Also note that this is the value of the - current working directory of the build script when it - starts. -* `CARGO_FEATURE_` - For each activated feature of the package being - built, this environment variable will be present - where `` is the name of the feature uppercased - and having `-` translated to `_`. -* `OUT_DIR` - the folder in which all output should be placed. This folder is - inside the build directory for the package being built, and it is - unique for the package in question. -* `TARGET` - the target triple that is being compiled for. Native code should be - compiled for this triple. Some more information about target - triples can be found in [clang's own documentation][clang]. -* `HOST` - the host triple of the rust compiler. -* `NUM_JOBS` - the parallelism specified as the top-level parallelism. This can - be useful to pass a `-j` parameter to a system like `make`. -* `OPT_LEVEL`, `DEBUG` - values of the corresponding variables for the - profile currently being built. -* `PROFILE` - name of the profile currently being built (see - [profiles][profile]). -* `DEP__` - For more information about this set of environment - variables, see build script documentation about [`links`][links]. - -[links]: build-script.html#the-links-manifest-key -[profile]: manifest.html#the-[profile.*]-sections -[clang]:http://clang.llvm.org/docs/CrossCompilation.html#target-triple - -# Environment variables Cargo sets for crates - -* `CARGO_PKG_VERSION` - The full version of your package. -* `CARGO_PKG_VERSION_MAJOR` - The major version of your package. -* `CARGO_PKG_VERSION_MINOR` - The minor version of your package. -* `CARGO_PKG_VERSION_PATCH` - The patch version of your package. -* `CARGO_PKG_VERSION_PRE` - The pre-release version of your package. - diff --git a/src/doc/footer.html b/src/doc/footer.html deleted file mode 100644 index c2eff8f40dc..00000000000 --- a/src/doc/footer.html +++ /dev/null @@ -1,11 +0,0 @@ - -

- - - diff --git a/src/doc/guide.md b/src/doc/guide.md deleted file mode 100644 index e714ac9b78e..00000000000 --- a/src/doc/guide.md +++ /dev/null @@ -1,454 +0,0 @@ -% Cargo Guide - -Welcome to the Cargo guide. This guide will give you all that you need to know -about how to use Cargo to develop Rust projects. - -# Why Cargo exists - -Cargo is a tool that allows Rust projects to declare their various -dependencies, and ensure that you'll always get a repeatable build. - -To accomplish this goal, Cargo does four things: - -* Introduces two metadata files with various bits of project information. -* Fetches and builds your project's dependencies. -* Invokes `rustc` or another build tool with the correct parameters to build your project. -* Introduces conventions, making working with Rust projects easier. - -# Converting to Cargo - -You can convert an existing Rust project to use Cargo. You'll have to create a -`Cargo.toml` file with all of your dependencies, and move your source files and -test files into the places where Cargo expects them to be. See the [manifest -description](manifest.html) and the [Project Layout](#project-layout) section -below for more details. - -# Creating A New Project - -To start a new project with Cargo, use `cargo new`: - -```shell -$ cargo new hello_world --bin -``` - -We're passing `--bin` because we're making a binary program: if we -were making a library, we'd leave it off. If you'd like to not initialize a new -git repository as well (the default), you can also pass `--vcs none`. - -Let's check out what Cargo has generated for us: - -```shell -$ cd hello_world -$ tree . -. -├── Cargo.toml -└── src - └── main.rs - -1 directory, 2 files -``` - -If we had just used `cargo new hello_world` without the `--bin` flag, then the -we would have a `lib.rs` instead of a `main.rs`. For now, however, this is all -we need to get started. First, let's check out `Cargo.toml`: - -```toml -[package] -name = "hello_world" -version = "0.1.0" -authors = ["Your Name "] -``` - -This is called a **manifest**, and it contains all of the metadata that Cargo -needs to compile your project. - -Here's what's in `src/main.rs`: - -``` -fn main() { - println!("Hello, world!"); -} -``` - -Cargo generated a 'hello world' for us. Let's compile it: - -
$ cargo build
-   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
- -And then run it: - -```shell -$ ./target/debug/hello_world -Hello, world! -``` - -We can also use `cargo run` to compile and then run it, all in one step: - -
$ cargo run
-     Fresh hello_world v0.1.0 (file:///path/to/project/hello_world)
-   Running `target/debug/hello_world`
-Hello, world!
- -You'll now notice a new file, `Cargo.lock`. It contains information about our -dependencies. Since we don't have any yet, it's not very interesting. - -Once you're ready for release, you can use `cargo build --release` to compile your files with optimizations turned on: - -
$ cargo build --release
-   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
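-
-The optimized binary ends up under `target/release` instead of `target/debug`;
-running it looks the same as before (a quick sketch, assuming the same
-`hello_world` project as above):
-
-```shell
-$ ./target/release/hello_world
-Hello, world!
-```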
- -# Working on an existing Cargo project - -If you download an existing project that uses Cargo, it's really easy -to get going. - -First, get the project from somewhere. In this example, we'll use `color-rs`: - -```sh -$ git clone https://github.com/bjz/color-rs.git -$ cd color-rs -``` - -To build, just use `cargo build`: - -
$ cargo build
-   Compiling color v0.1.0 (file:///path/to/project/color-rs)
- -This will fetch all of the dependencies and then build them, along with the -project. - -# Adding Dependencies - -To depend on a library, add it to your `Cargo.toml`. - -## Adding a dependency - -It's quite simple to add a dependency. Simply add it to your `Cargo.toml` file: - -```toml -[dependencies] -time = "0.1.12" -``` - -Re-run `cargo build` to download the dependencies and build your source with the new dependencies. - - -```toml -[package] -name = "hello_world" -version = "0.1.0" -authors = ["Your Name "] - -[dependencies] -regex = "0.1.41" -``` - -You added the `regex` library, which provides support for regular expressions. - -Now, you can pull in that library using `extern crate` in -`main.rs`. - -``` -extern crate regex; - -use regex::Regex; - -fn main() { - let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap(); - println!("Did our date match? {}", re.is_match("2014-01-01")); -} -``` - -The next time we build, Cargo will fetch this new dependency, all of its -dependencies, compile them all, and update the `Cargo.lock`: - -
$ cargo build
-    Updating registry `https://github.com/rust-lang/crates.io-index`
- Downloading memchr v0.1.5
- Downloading libc v0.1.10
- Downloading regex-syntax v0.2.1
- Downloading aho-corasick v0.3.0
- Downloading regex v0.1.41
-   Compiling memchr v0.1.5
-   Compiling libc v0.1.10
-   Compiling regex-syntax v0.2.1
-   Compiling aho-corasick v0.3.0
-   Compiling regex v0.1.41
-   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
- -Run it: - -
$ cargo run
-     Running `target/debug/hello_world`
-Did our date match? true
- -Our `Cargo.lock` contains the exact information about which revision of all of -these dependencies we used. - -Now, if `regex` gets updated, we will still build with the same revision, until -we choose to `cargo update`. - -# Project Layout - -Cargo uses conventions for file placement to make it easy to dive into a new -Cargo project: - -* `Cargo.toml` and `Cargo.lock` are stored in the root of your project. -* Source code goes in the `src` directory. -* The default library file is `src/lib.rs`. -* The default executable file is `src/main.rs`. -* Other executables can be placed in `src/bin/*.rs`. -* External tests go in the `tests` directory. -* Example executable files go in the `examples` directory. -* Benchmarks go in the `benches` directory. - -These are explained in more detail in the [manifest -description](manifest.html#the-project-layout). - -# Cargo.toml vs Cargo.lock - -`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk -about them, here's a summary: - -* `Cargo.toml` is about describing your dependencies in a broad sense, and is written by you. -* `Cargo.lock` contains exact information about your dependencies, and is maintained by Cargo. -* If you're building a library, put `Cargo.lock` in your `.gitignore`. -* If you're building an executable, check `Cargo.lock` into `git`. - -Let's dig in a little bit more. - -`Cargo.toml` is a **manifest** file. In the manifest, we can specify a bunch of -different metadata about our project. For example, we can say that we depend -on another project: - -```toml -[package] -name = "hello_world" -version = "0.1.0" -authors = ["Your Name "] - -[dependencies.color] -git = "https://github.com/bjz/color-rs.git" -``` - -This project has a single dependency, on the `color` library. We've stated in -this case that we're relying on a particular Git repository that lives on -GitHub. Since we haven't specified any other information, Cargo assumes that -we intend to use the latest commit on the `master` branch to build our project. - -Sound good? Well, there's one problem: If you build this project today, and -then you send a copy to me, and I build this project tomorrow, something bad -could happen. `bjz` could update `color-rs` in the meantime, and my build would -include this commit, while yours would not. Therefore, we would get different -builds. This would be bad, because we want reproducible builds. - -We could fix this problem by putting a `rev` line in our `Cargo.toml`: - -```toml -[dependencies.color] -git = "https://github.com/bjz/color-rs.git" -rev = "bf739419e2d31050615c1ba1a395b474269a4" -``` - -Now, our builds will be the same. But, there's a big drawback: now we have to -manually think about SHA-1s every time we want to update our library. This is -both tedious and error prone. - -Enter the `Cargo.lock`. Because of its existence, we don't need to manually -keep track of the exact revisions: Cargo will do it for us. When we have a -manifest like this: - -```toml -[package] -name = "hello_world" -version = "0.1.0" -authors = ["Your Name "] - -[dependencies.color] -git = "https://github.com/bjz/color-rs.git" -``` - -Cargo will take the latest commit, and write that information out into our -`Cargo.lock` when we build for the first time. 
That file will look like this: - -```toml -[root] -name = "hello_world" -version = "0.1.0" -dependencies = [ - "color 0.1.0 (git+https://github.com/bjz/color-rs.git#bf739419e2d31050615c1ba1a395b474269a4b98)", -] - -[[package]] -name = "color" -version = "0.1.0" -source = "git+https://github.com/bjz/color-rs.git#bf739419e2d31050615c1ba1a395b474269a4b98" - -``` - -You can see that there's a lot more information here, including the exact -revision we used to build. Now, when you give your project to someone else, -they'll use the exact same SHA, even though we didn't specify it in our -`Cargo.toml`. - -When we're ready to opt in to a new version of the library, Cargo can -re-calculate the dependencies, and update things for us: - -```shell -$ cargo update # updates all dependencies -$ cargo update -p color # updates just 'color' -``` - -This will write out a new `Cargo.lock` with the new version information. Note -that the argument to `cargo update` is actually a -[Package ID Specification](pkgid-spec.html) and `color` is just a short -specification. - -# Overriding Dependencies - -Sometimes, you may want to override one of Cargo's dependencies. For example, -let's say you're working on a project, `conduit-static`, which depends on -the package `conduit`. You find a bug in `conduit`, and you want to write a -patch. Here's what `conduit-static`'s `Cargo.toml` looks like: - -```toml -[package] -name = "conduit-static" -version = "0.1.0" -authors = ["Yehuda Katz "] - -[dependencies] -conduit = "0.7" -``` - -You check out a local copy of `conduit`, let's say in your `~/src` directory: - -```shell -$ cd ~/src -$ git clone https://github.com/conduit-rust/conduit.git -``` - -You'd like to have `conduit-static` use your local version of `conduit`, -rather than the one on GitHub, while you fix the bug. - -Cargo solves this problem by allowing you to have a local configuration -that specifies an **override**. If Cargo finds this configuration when -building your package, it will use the override on your local machine -instead of the source specified in your `Cargo.toml`. - -Cargo looks for a directory named `.cargo` up the directory hierarchy of -your project. If your project is in `/path/to/project/conduit-static`, -it will search for a `.cargo` in: - -* `/path/to/project/conduit-static` -* `/path/to/project` -* `/path/to` -* `/path` -* `/` - -This allows you to specify your overrides in a parent directory that -includes commonly used packages that you work on locally, and share them -with all projects. - -To specify overrides, create a `.cargo/config` file in some ancestor of -your project's directory (common places to put it is in the root of -your code directory or in your home directory). - -Inside that file, put this: - -```toml -paths = ["/path/to/project/conduit"] -``` - -This array should be filled with directories that contain a `Cargo.toml`. In -this instance, we're just adding `conduit`, so it will be the only one that's -overridden. This path must be an absolute path. - -Note: using a local configuration to override paths will only work for crates -that have been published to crates.io. You cannot use this feature to tell Cargo -how to find local unpublished crates. - -More information about local configuration can be found in the [configuration -documentation](config.html). - -# Tests - -Cargo can run your tests with the `cargo test` command. Cargo runs tests in two -places: in each of your `src` files, and any tests in `tests/`. 
Tests -in your `src` files should be unit tests, and tests in `tests/` should be -integration-style tests. As such, you'll need to import your crates into -the files in `tests`. - -To run your tests, just run `cargo test`: - -
$ cargo test
-   Compiling color v0.1.0 (https://github.com/bjz/color-rs.git#bf739419)
-   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
-     Running target/test/hello_world-9c2b65bbb79eabce
-
-running 0 tests
-
-test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
-
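-
-For reference, here is a minimal sketch of the two kinds of tests described
-above (the `add` function and the file names are illustrative, not something
-`cargo new` generates):
-
-```
-// src/lib.rs
-pub fn add(a: i32, b: i32) -> i32 {
-    a + b
-}
-
-#[cfg(test)]
-mod tests {
-    // A unit test: it lives next to the code it exercises, so it can also
-    // use private items from this module.
-    use super::add;
-
-    #[test]
-    fn adds_two_numbers() {
-        assert_eq!(add(2, 2), 4);
-    }
-}
-```
-
-```
-// tests/integration.rs
-// An integration test: compiled as its own crate, so the library must be
-// imported explicitly and only its public API is visible.
-extern crate hello_world;
-
-#[test]
-fn adds_from_outside() {
-    assert_eq!(hello_world::add(2, 2), 4);
-}
-```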
- -Of course, if your project has tests, you'll see more output, with the -correct number of tests. - -You can also run a specific test by passing a filter: - -
$ cargo test foo
-
- -This will run any test with `foo` in its name. - -`cargo test` runs additional tests as well. For example, it will compile any -examples, you’ve included, and will also test the examples in your -documentation. Please see the [testing guide][testing] in the Rust -documentation for more details. - -[testing]: https://doc.rust-lang.org/book/testing.html - -# Path Dependencies - -Over time our `hello_world` project has grown significantly in size! It's gotten -to the point that we probably want to split out a separate crate for others to -use. To do this Cargo supports **path dependencies** which are typically -sub-crates that live within one repository. Let's start off by making a new -crate inside of our `hello_world` project: - -```shell -# inside of hello_world/ -$ cargo new hello_utils -``` - -This will create a new folder `hello_utils` inside of which a `Cargo.toml` and -`src` folder are ready to be configured. In order to tell Cargo about this, open -up `hello_world/Cargo.toml` and add these lines: - -```toml -[dependencies.hello_utils] -path = "hello_utils" -``` - -This tells Cargo that we depend on a crate called `hello_utils` which is found -in the `hello_utils` folder (relative to the `Cargo.toml` it's written in). - -And that's it! The next `cargo build` will automatically build `hello_utils` and -all of its own dependencies, and others can also start using the crate as well. - -## Travis-CI - -To test your project on Travis-CI, here is a sample `.travis.yml` file: - -``` -language: rust -``` diff --git a/src/doc/header.html b/src/doc/header.html deleted file mode 100644 index 3a1ec6ac713..00000000000 --- a/src/doc/header.html +++ /dev/null @@ -1,47 +0,0 @@ - - - - - - - - -
diff --git a/src/doc/images/Cargo-Logo-Small.png b/src/doc/images/Cargo-Logo-Small.png deleted file mode 100644 index eca9665f62e..00000000000 Binary files a/src/doc/images/Cargo-Logo-Small.png and /dev/null differ diff --git a/src/doc/images/auth-level-acl.png b/src/doc/images/auth-level-acl.png deleted file mode 100644 index 09ccae64452..00000000000 Binary files a/src/doc/images/auth-level-acl.png and /dev/null differ diff --git a/src/doc/images/circle-with-i.png b/src/doc/images/circle-with-i.png deleted file mode 100644 index f9feda4bcbf..00000000000 Binary files a/src/doc/images/circle-with-i.png and /dev/null differ diff --git a/src/doc/images/forkme.png b/src/doc/images/forkme.png deleted file mode 100644 index 100aad0f6af..00000000000 Binary files a/src/doc/images/forkme.png and /dev/null differ diff --git a/src/doc/images/noise.png b/src/doc/images/noise.png deleted file mode 100644 index 545d9305462..00000000000 Binary files a/src/doc/images/noise.png and /dev/null differ diff --git a/src/doc/images/org-level-acl.png b/src/doc/images/org-level-acl.png deleted file mode 100644 index c3f13e14ee6..00000000000 Binary files a/src/doc/images/org-level-acl.png and /dev/null differ diff --git a/src/doc/images/search.png b/src/doc/images/search.png deleted file mode 100644 index a4814a5b09b..00000000000 Binary files a/src/doc/images/search.png and /dev/null differ diff --git a/src/doc/index.md b/src/doc/index.md deleted file mode 100644 index 699c808f4f0..00000000000 --- a/src/doc/index.md +++ /dev/null @@ -1,88 +0,0 @@ -% Cargo, Rust's Package Manager - -# Installing - -The easiest way to get Cargo is to get the current stable release of Rust by -using the `rustup` script: - -```shell -$ curl -sSf https://static.rust-lang.org/rustup.sh | sh -``` - -This will get you the current stable release of Rust for your platform along -with the latest Cargo. - -If you are on Windows, you can directly download the latest 32bit ([Rust](https://static.rust-lang.org/dist/rust-1.0.0-i686-pc-windows-gnu.msi) -and [Cargo](https://static.rust-lang.org/cargo-dist/cargo-nightly-i686-pc-windows-gnu.tar.gz)) or 64bit ([Rust](https://static.rust-lang.org/dist/rust-1.0.0-x86_64-pc-windows-gnu.msi) and [Cargo](https://static.rust-lang.org/cargo-dist/cargo-nightly-x86_64-pc-windows-gnu.tar.gz)) Rust stable releases or Cargo nightlies. - -Alternatively, you can build Cargo from source. - -# Let's Get Started - -To start a new project with Cargo, use `cargo new`: - -```shell -$ cargo new hello_world --bin -``` - -We're passing `--bin` because we're making a binary program: if we -were making a library, we'd leave it off. - -Let's check out what Cargo has generated for us: - -```shell -$ cd hello_world -$ tree . -. -├── Cargo.toml -└── src - └── main.rs - -1 directory, 2 files -``` - -This is all we need to get started. First, let's check out `Cargo.toml`: - -```toml -[package] -name = "hello_world" -version = "0.1.0" -authors = ["Your Name "] -``` - -This is called a **manifest**, and it contains all of the metadata that Cargo -needs to compile your project. - -Here's what's in `src/main.rs`: - -``` -fn main() { - println!("Hello, world!"); -} -``` - -Cargo generated a 'hello world' for us. Let's compile it: - -
$ cargo build
-   Compiling hello_world v0.1.0 (file:///path/to/project/hello_world)
- -And then run it: - -```shell -$ ./target/debug/hello_world -Hello, world! -``` - -We can also use `cargo run` to compile and then run it, all in one step: - -
$ cargo run
-     Fresh hello_world v0.1.0 (file:///path/to/project/hello_world)
-   Running `target/debug/hello_world`
-Hello, world!
- -# Going Further - -For more details on using Cargo, check out the [Cargo Guide](guide.html) diff --git a/src/doc/javascripts/all.js b/src/doc/javascripts/all.js deleted file mode 100644 index 2694e8fc60b..00000000000 --- a/src/doc/javascripts/all.js +++ /dev/null @@ -1,36 +0,0 @@ -//= require_tree . - -Prism.languages.toml = { - // https://github.com/LeaVerou/prism/issues/307 - 'comment': [{ - pattern: /(^[^"]*?("[^"]*?"[^"]*?)*?[^"\\]*?)(\/\*[\w\W]*?\*\/|(^|[^:])#.*?(\r?\n|$))/g, - lookbehind: true - }], - 'string': /("|')(\\?.)*?\1/g, - 'number': /\d+/, - 'boolean': /true|false/, - 'toml-section': /\[.*\]/, - 'toml-key': /[\w-]+/ -}; - -$(function() { - var pres = document.querySelectorAll('pre.rust'); - for (var i = 0; i < pres.length; i++) { - pres[i].className += ' language-rust'; - } - - $('button.dropdown, a.dropdown').click(function(el, e) { - $(this).toggleClass('active'); - $(this).siblings('ul').toggleClass('open'); - - if ($(this).hasClass('active')) { - $(document).on('mousedown.useroptions', function() { - setTimeout(function() { - $('button.dropdown, a.dropdown').removeClass('active'); - $('button.dropdown + ul').removeClass('open'); - }, 150); - $(document).off('mousedown.useroptions'); - }); - } - }); -}); diff --git a/src/doc/javascripts/prism.js b/src/doc/javascripts/prism.js deleted file mode 100644 index 13c24078300..00000000000 --- a/src/doc/javascripts/prism.js +++ /dev/null @@ -1,6 +0,0 @@ -/* http://prismjs.com/download.html?themes=prism&languages=markup+css+clike+javascript */ -self="undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{};var Prism=function(){var e=/\blang(?:uage)?-(?!\*)(\w+)\b/i,t=self.Prism={util:{encode:function(e){return e instanceof n?new n(e.type,t.util.encode(e.content),e.alias):"Array"===t.util.type(e)?e.map(t.util.encode):e.replace(/&/g,"&").replace(/e.length)break e;if(!(d instanceof a)){c.lastIndex=0;var m=c.exec(d);if(m){u&&(f=m[1].length);var y=m.index-1+f,m=m[0].slice(f),v=m.length,k=y+v,b=d.slice(0,y+1),w=d.slice(k+1),N=[p,1];b&&N.push(b);var O=new a(l,g?t.tokenize(m,g):m,h);N.push(O),w&&N.push(w),Array.prototype.splice.apply(r,N)}}}}}return r},hooks:{all:{},add:function(e,n){var a=t.hooks.all;a[e]=a[e]||[],a[e].push(n)},run:function(e,n){var a=t.hooks.all[e];if(a&&a.length)for(var r,i=0;r=a[i++];)r(n)}}},n=t.Token=function(e,t,n){this.type=e,this.content=t,this.alias=n};if(n.stringify=function(e,a,r){if("string"==typeof e)return e;if("[object Array]"==Object.prototype.toString.call(e))return e.map(function(t){return n.stringify(t,a,e)}).join("");var i={type:e.type,content:n.stringify(e.content,a,r),tag:"span",classes:["token",e.type],attributes:{},language:a,parent:r};if("comment"==i.type&&(i.attributes.spellcheck="true"),e.alias){var l="Array"===t.util.type(e.alias)?e.alias:[e.alias];Array.prototype.push.apply(i.classes,l)}t.hooks.run("wrap",i);var o="";for(var s in i.attributes)o+=s+'="'+(i.attributes[s]||"")+'"';return"<"+i.tag+' class="'+i.classes.join(" ")+'" '+o+">"+i.content+""},!self.document)return self.addEventListener?(self.addEventListener("message",function(e){var n=JSON.parse(e.data),a=n.language,r=n.code;self.postMessage(JSON.stringify(t.util.encode(t.tokenize(r,t.languages[a])))),self.close()},!1),self.Prism):self.Prism;var a=document.getElementsByTagName("script");return 
a=a[a.length-1],a&&(t.filename=a.src,document.addEventListener&&!a.hasAttribute("data-manual")&&document.addEventListener("DOMContentLoaded",t.highlightAll)),self.Prism}();"undefined"!=typeof module&&module.exports&&(module.exports=Prism);; -Prism.languages.markup={comment://g,prolog:/<\?.+?\?>/,doctype://,cdata://i,tag:{pattern:/<\/?[\w:-]+\s*(?:\s+[\w:-]+(?:=(?:("|')(\\?[\w\W])*?\1|[^\s'">=]+))?\s*)*\/?>/gi,inside:{tag:{pattern:/^<\/?[\w:-]+/i,inside:{punctuation:/^<\/?/,namespace:/^[\w-]+?:/}},"attr-value":{pattern:/=(?:('|")[\w\W]*?(\1)|[^\s>]+)/gi,inside:{punctuation:/=|>|"/g}},punctuation:/\/?>/g,"attr-name":{pattern:/[\w:-]+/g,inside:{namespace:/^[\w-]+?:/}}}},entity:/\&#?[\da-z]{1,8};/gi},Prism.hooks.add("wrap",function(t){"entity"===t.type&&(t.attributes.title=t.content.replace(/&/,"&"))});; -Prism.languages.css={comment:/\/\*[\w\W]*?\*\//g,atrule:{pattern:/@[\w-]+?.*?(;|(?=\s*{))/gi,inside:{punctuation:/[;:]/g}},url:/url\((["']?).*?\1\)/gi,selector:/[^\{\}\s][^\{\};]*(?=\s*\{)/g,property:/(\b|\B)[\w-]+(?=\s*:)/gi,string:/("|')(\\?.)*?\1/g,important:/\B!important\b/gi,punctuation:/[\{\};:]/g,"function":/[-a-z0-9]+(?=\()/gi},Prism.languages.markup&&Prism.languages.insertBefore("markup","tag",{style:{pattern:/[\w\W]*?<\/style>/gi,inside:{tag:{pattern:/|<\/style>/gi,inside:Prism.languages.markup.tag.inside},rest:Prism.languages.css}}});; -Prism.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\w\W]*?\*\//g,lookbehind:!0},{pattern:/(^|[^\\:])\/\/.*?(\r?\n|$)/g,lookbehind:!0}],string:/("|')(\\?.)*?\1/g,"class-name":{pattern:/((?:(?:class|interface|extends|implements|trait|instanceof|new)\s+)|(?:catch\s+\())[a-z0-9_\.\\]+/gi,lookbehind:!0,inside:{punctuation:/(\.|\\)/}},keyword:/\b(if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/g,"boolean":/\b(true|false)\b/g,"function":{pattern:/[a-z0-9_]+\(/gi,inside:{punctuation:/\(/}},number:/\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?)\b/g,operator:/[-+]{1,2}|!|<=?|>=?|={1,3}|&{1,2}|\|?\||\?|\*|\/|\~|\^|\%/g,ignore:/&(lt|gt|amp);/gi,punctuation:/[{}[\];(),.:]/g};; -Prism.languages.javascript=Prism.languages.extend("clike",{keyword:/\b(break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|false|finally|for|function|get|if|implements|import|in|instanceof|interface|let|new|null|package|private|protected|public|return|set|static|super|switch|this|throw|true|try|typeof|var|void|while|with|yield)\b/g,number:/\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?|NaN|-?Infinity)\b/g}),Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:/(^|[^/])\/(?!\/)(\[.+?]|\\.|[^/\r\n])+\/[gim]{0,3}(?=\s*($|[\r\n,.;})]))/g,lookbehind:!0}}),Prism.languages.markup&&Prism.languages.insertBefore("markup","tag",{script:{pattern:/[\w\W]*?<\/script>/gi,inside:{tag:{pattern:/|<\/script>/gi,inside:Prism.languages.markup.tag.inside},rest:Prism.languages.javascript}}});; diff --git a/src/doc/man/cargo-bench.adoc b/src/doc/man/cargo-bench.adoc new file mode 100644 index 00000000000..879ee2021ce --- /dev/null +++ b/src/doc/man/cargo-bench.adoc @@ -0,0 +1,142 @@ += cargo-bench(1) +:idprefix: cargo_bench_ +:doctype: manpage +:actionverb: Benchmark +:nouns: benchmarks + +== NAME + +cargo-bench - Execute benchmarks of a package + +== SYNOPSIS + +`cargo bench [_OPTIONS_] [BENCHNAME] [-- _BENCH-OPTIONS_]` + +== DESCRIPTION + +Compile and execute benchmarks. 
+ +The benchmark filtering argument `BENCHNAME` and all the arguments following +the two dashes (`--`) are passed to the benchmark binaries and thus to +_libtest_ (rustc's built-in unit-test and micro-benchmarking framework). If +you're passing arguments to both Cargo and the binary, the ones after `--` go +to the binary, the ones before go to Cargo. For details about libtest's +arguments, see the output of `cargo bench -- --help`. As an example, this will +run only the benchmark named `foo` (and skip other similarly named benchmarks +like `foobar`): + + cargo bench -- foo --exact + +Benchmarks are built with the `--test` option to `rustc` which creates an +executable with a `main` function that automatically runs all functions +annotated with the `#[bench]` attribute. Cargo passes the `--bench` flag to +the test harness to tell it to run only benchmarks. + +The libtest harness may be disabled by setting `harness = false` in the target +manifest settings, in which case your code will need to provide its own `main` +function to handle running benchmarks. + +== OPTIONS + +=== Benchmark Options + +include::options-test.adoc[] + +=== Package Selection + +include::options-packages.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo bench` will build the +following targets of the selected packages: + +- lib — used to link with binaries and benchmarks +- bins (only if benchmark targets are built and required features are + available) +- lib as a benchmark +- bins as benchmarks +- benchmark targets + +The default behavior can be changed by setting the `bench` flag for the target +in the manifest settings. Setting examples to `bench = true` will build and +run the example as a benchmark. Setting targets to `bench = false` will stop +them from being benchmarked by default. Target selection options that take a +target by name ignore the `bench` flag and will always benchmark the given +target. + +include::options-targets.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +By default the Rust test harness hides output from benchmark execution to keep +results readable. Benchmark output can be recovered (e.g., for debugging) by +passing `--nocapture` to the benchmark binaries: + + cargo bench -- --nocapture + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +The `--jobs` argument affects the building of the benchmark executable but +does not affect how many threads are used when running the benchmarks. The +Rust test harness runs benchmarks serially in a single thread. + +include::options-jobs.adoc[] + +== PROFILES + +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +linkcargo:reference/manifest.html#the-profile-sections[the reference] +for more details. + +Benchmarks are always built with the `bench` profile. Binary and lib targets +are built separately as benchmarks with the `bench` profile. Library targets +are built with the `release` profile when linked to binaries and benchmarks. +Dependencies use the `release` profile.
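+ +For example, to build the benchmark executables with the `bench` profile +without running them, and then invoke one by hand (a sketch: the file name +under `target/release/deps` is illustrative and includes a metadata hash): + + cargo bench --no-run + ./target/release/deps/mybench-a1b2c3d4e5f6 --bench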
+ +If you need a debug build of a benchmark, try building it with +man:cargo-build[1], which uses the `test` profile; that profile is +unoptimized by default and includes debug information. You can then run the +debug-enabled benchmark manually. + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Build and execute all the benchmarks of the current package: + + cargo bench + +. Run only a specific benchmark within a specific benchmark target: + + cargo bench --bench bench_name -- modname::some_benchmark + +== SEE ALSO +man:cargo[1], man:cargo-test[1] diff --git a/src/doc/man/cargo-build.adoc b/src/doc/man/cargo-build.adoc new file mode 100644 index 00000000000..a7b20d7de64 --- /dev/null +++ b/src/doc/man/cargo-build.adoc @@ -0,0 +1,98 @@ += cargo-build(1) +:idprefix: cargo_build_ +:doctype: manpage +:actionverb: Build + +== NAME + +cargo-build - Compile the current package + +== SYNOPSIS + +`cargo build [_OPTIONS_]` + +== DESCRIPTION + +Compile local packages and all of their dependencies. + +== OPTIONS + +=== Package Selection + +include::options-packages.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo build` will build all +binary and library targets of the selected packages. Binaries are skipped if +they have `required-features` that are missing. + +include::options-targets.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +*--out-dir* _DIRECTORY_:: + Copy final artifacts to this directory. ++ +This option is unstable and available only on the +link:https://doc.rust-lang.org/book/appendix-07-nightly-rust.html[nightly channel] +and requires the `-Z unstable-options` flag to enable. +See https://github.com/rust-lang/cargo/issues/6790 for more information. + +=== Display Options + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +*--build-plan*:: + Outputs a series of JSON messages to stdout that indicate the commands to + run the build. ++ +This option is unstable and available only on the +link:https://doc.rust-lang.org/book/appendix-07-nightly-rust.html[nightly channel] +and requires the `-Z unstable-options` flag to enable. +See https://github.com/rust-lang/cargo/issues/5579 for more information. + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Build the local package and all of its dependencies: + + cargo build + +. Build with optimizations: + + cargo build --release + +== SEE ALSO +man:cargo[1], man:cargo-rustc[1] diff --git a/src/doc/man/cargo-check.adoc b/src/doc/man/cargo-check.adoc new file mode 100644 index 00000000000..c84b1dcef4c --- /dev/null +++ b/src/doc/man/cargo-check.adoc @@ -0,0 +1,87 @@ += cargo-check(1) +:idprefix: cargo_check_ +:doctype: manpage +:actionverb: Check + +== NAME + +cargo-check - Check the current package + +== SYNOPSIS + +`cargo check [_OPTIONS_]` + +== DESCRIPTION + +Check a local package and all of its dependencies for errors. This will +essentially compile the packages without performing the final step of code +generation, which is faster than running `cargo build`.
The compiler will save +metadata files to disk so that future runs will reuse them if the source has +not been modified. + +== OPTIONS + +=== Package Selection + +include::options-packages.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo check` will check all +binary and library targets of the selected packages. Binaries are skipped if +they have `required-features` that are missing. + +include::options-targets.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +include::options-profile.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Check the local package for errors: + + cargo check + +. Check all targets, including unit tests: + + cargo check --all-targets --profile=test + +== SEE ALSO +man:cargo[1], man:cargo-build[1] diff --git a/src/doc/man/cargo-clean.adoc b/src/doc/man/cargo-clean.adoc new file mode 100644 index 00000000000..08fdb76deb3 --- /dev/null +++ b/src/doc/man/cargo-clean.adoc @@ -0,0 +1,76 @@ += cargo-clean(1) +:idprefix: cargo_clean_ +:doctype: manpage +:actionverb: Clean + +== NAME + +cargo-clean - Remove generated artifacts + +== SYNOPSIS + +`cargo clean [_OPTIONS_]` + +== DESCRIPTION + +Remove artifacts from the target directory that Cargo has generated in the +past. + +With no options, `cargo clean` will delete the entire target directory. + +== OPTIONS + +=== Package Selection + +When no packages are selected, all packages and all dependencies in the +workspace are cleaned. + +*-p* _SPEC_...:: +*--package* _SPEC_...:: + Clean only the specified packages. This flag may be specified + multiple times. See man:cargo-pkgid[1] for the SPEC format. + +=== Clean Options + +*--doc*:: + This option will cause `cargo clean` to remove only the `doc` directory in + the target directory. + +*--release*:: + Clean all artifacts that were built with the `release` or `bench` + profiles. + +include::options-target-dir.adoc[] + +include::options-target-triple.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Remove the entire target directory: + + cargo clean + +. Remove only the release artifacts: + + cargo clean --release + +== SEE ALSO +man:cargo[1], man:cargo-build[1] diff --git a/src/doc/man/cargo-doc.adoc b/src/doc/man/cargo-doc.adoc new file mode 100644 index 00000000000..99bfe75ff11 --- /dev/null +++ b/src/doc/man/cargo-doc.adoc @@ -0,0 +1,95 @@ += cargo-doc(1) +:idprefix: cargo_doc_ +:doctype: manpage +:actionverb: Document + +== NAME + +cargo-doc - Build a package's documentation + +== SYNOPSIS + +`cargo doc [_OPTIONS_]` + +== DESCRIPTION + +Build the documentation for the local package and all dependencies. The output +is placed in `target/doc` in rustdoc's usual format. 
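+ +For example, to build the documentation and open it in a browser, or to +document only the local package without its dependencies (both flags are +described below): + + cargo doc --open + cargo doc --no-deps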
+ +== OPTIONS + +=== Documentation Options + +*--open*:: + Open the docs in a browser after building them. + +*--no-deps*:: + Do not build documentation for dependencies. + +*--document-private-items*:: + Include non-public items in the documentation. + +=== Package Selection + +include::options-packages.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo doc` will document all +binary and library targets of the selected package. The binary will be skipped +if its name is the same as the lib target. Binaries are skipped if they have +`required-features` that are missing. + +The default behavior can be changed by setting `doc = false` for the target in +the manifest settings. Using target selection options will ignore the `doc` +flag and will always document the given target. + +include::options-targets-lib-bin.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Build the local package documentation and its dependencies and output to +`target/doc`. + + cargo doc + +== SEE ALSO +man:cargo[1], man:cargo-rustdoc[1], man:rustdoc[1] diff --git a/src/doc/man/cargo-fetch.adoc b/src/doc/man/cargo-fetch.adoc new file mode 100644 index 00000000000..3a12882b13e --- /dev/null +++ b/src/doc/man/cargo-fetch.adoc @@ -0,0 +1,61 @@ += cargo-fetch(1) +:idprefix: cargo_fetch_ +:doctype: manpage +:actionverb: Fetch + +== NAME + +cargo-fetch - Fetch dependencies of a package from the network + +== SYNOPSIS + +`cargo fetch [_OPTIONS_]` + +== DESCRIPTION + +If a `Cargo.lock` file is available, this command will ensure that all of the +git dependencies and/or registry dependencies are downloaded and locally +available. Subsequent Cargo commands never touch the network after a `cargo +fetch` unless the lock file changes. + +If the lock file is not available, then this command will generate the lock +file before fetching the dependencies. + +If `--target` is not specified, then all target dependencies are fetched. + +See also the link:https://crates.io/crates/cargo-prefetch[cargo-prefetch] +plugin which adds a command to download popular crates. This may be useful if +you plan to use Cargo without a network with the `--offline` flag. + +== OPTIONS + +=== Fetch options + +include::options-target-triple.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. 
Fetch all dependencies: + + cargo fetch + +== SEE ALSO +man:cargo[1], man:cargo-update[1], man:cargo-generate-lockfile[1] diff --git a/src/doc/man/cargo-fix.adoc b/src/doc/man/cargo-fix.adoc new file mode 100644 index 00000000000..2a7ff56a64f --- /dev/null +++ b/src/doc/man/cargo-fix.adoc @@ -0,0 +1,138 @@ += cargo-fix(1) +:idprefix: cargo_fix_ +:doctype: manpage +:actionverb: Fix + +== NAME + +cargo-fix - Automatically fix lint warnings reported by rustc + +== SYNOPSIS + +`cargo fix [_OPTIONS_]` + +== DESCRIPTION + +This Cargo subcommand will automatically take rustc's suggestions from +diagnostics like warnings and apply them to your source code. This is intended +to help automate tasks that rustc itself already knows how to tell you to fix! +The `cargo fix` subcommand is also being developed for the Rust 2018 edition +to make it easy for code to opt in to the new edition without having +to worry about any breakage. + +Under the hood, executing `cargo fix` runs man:cargo-check[1]. Any warnings +applicable to your crate will be automatically fixed (if possible) and all +remaining warnings will be displayed when the check process is finished. For +example, if you'd like to prepare for the 2018 edition, you can do so by +executing: + + cargo fix --edition + +which behaves the same as `cargo check --all-targets`. Similarly, if you'd like +to fix code for a different platform, you can run: + + cargo fix --edition --target x86_64-pc-windows-gnu + +or if your crate has optional features: + + cargo fix --edition --no-default-features --features foo + +If you encounter any problems with `cargo fix` or otherwise have any questions +or feature requests, please don't hesitate to file an issue at +https://github.com/rust-lang/cargo + +== OPTIONS + +=== Fix options + +*--broken-code*:: + Fix code even if it already has compiler errors. This is useful if `cargo + fix` fails to apply the changes. It will apply the changes and leave the + broken code in the working directory for you to inspect and manually fix. + +*--edition*:: + Apply changes that will update the code to the latest edition. This will + not update the edition in the `Cargo.toml` manifest, which must be updated + manually. + +*--edition-idioms*:: + Apply suggestions that will update code to the preferred style for the + current edition. + +*--allow-no-vcs*:: + Fix code even if a VCS was not detected. + +*--allow-dirty*:: + Fix code even if the working directory has changes. + +*--allow-staged*:: + Fix code even if the working directory has staged changes. + +=== Package Selection + +include::options-packages.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo fix` will fix all targets +(`--all-targets` implied). Binaries are skipped if they have +`required-features` that are missing. + +include::options-targets.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +include::options-profile.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. 
Apply compiler suggestions to the local package: + + cargo fix + +. Convert a 2015 edition to 2018: + + cargo fix --edition + +. Apply suggested idioms for the current edition: + + cargo fix --edition-idioms + +== SEE ALSO +man:cargo[1], man:cargo-check[1] diff --git a/src/doc/man/cargo-generate-lockfile.adoc b/src/doc/man/cargo-generate-lockfile.adoc new file mode 100644 index 00000000000..2b89159782d --- /dev/null +++ b/src/doc/man/cargo-generate-lockfile.adoc @@ -0,0 +1,49 @@ += cargo-generate-lockfile(1) +:idprefix: cargo_generate-lockfile_ +:doctype: manpage + +== NAME + +cargo-generate-lockfile - Generate the lockfile for a package + +== SYNOPSIS + +`cargo generate-lockfile [_OPTIONS_]` + +== DESCRIPTION + +This command will create the `Cargo.lock` lockfile for the current package or +workspace. If the lockfile already exists, it will be rebuilt if there are any +manifest changes or dependency updates. + +See also man:cargo-update[1] which is also capable of creating a `Cargo.lock` +lockfile and has more options for controlling update behavior. + +== OPTIONS + +=== Display Options + +include::options-display.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Create or update the lockfile for the current package or workspace: + + cargo generate-lockfile + +== SEE ALSO +man:cargo[1], man:cargo-update[1] diff --git a/src/doc/man/cargo-help.adoc b/src/doc/man/cargo-help.adoc new file mode 100644 index 00000000000..bcbb5ba34c3 --- /dev/null +++ b/src/doc/man/cargo-help.adoc @@ -0,0 +1,28 @@ += cargo-help(1) +:idprefix: cargo_help_ +:doctype: manpage + +== NAME + +cargo-help - Get help for a Cargo command + +== SYNOPSIS + +`cargo help [_SUBCOMMAND_]` + +== DESCRIPTION + +Prints a help message for the given command. + +== EXAMPLES + +. Get help for a command: + + cargo help build + +. Help is also available with the `--help` flag: + + cargo build --help + +== SEE ALSO +man:cargo[1] diff --git a/src/doc/man/cargo-init.adoc b/src/doc/man/cargo-init.adoc new file mode 100644 index 00000000000..6df38bf6836 --- /dev/null +++ b/src/doc/man/cargo-init.adoc @@ -0,0 +1,55 @@ += cargo-init(1) +:idprefix: cargo_init_ +:doctype: manpage + +== NAME + +cargo-init - Create a new Cargo package in an existing directory + +== SYNOPSIS + +`cargo init [_OPTIONS_] [_PATH_]` + +== DESCRIPTION + +This command will create a new Cargo manifest in the current directory. Give a +path as an argument to create in the given directory. + +If there are typically-named Rust source files already in the directory, those +will be used. If not, then a sample `src/main.rs` file will be created, or +`src/lib.rs` if `--lib` is passed. + +If the directory is not already in a VCS repository, then a new repository +is created (see `--vcs` below). + +include::description-new-authors.adoc[] + +See man:cargo-new[1] for a similar command which will create a new package in +a new directory. + +== OPTIONS + +=== Init Options + +include::options-new.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. 
Create a binary Cargo package in the current directory: + + cargo init + +== SEE ALSO +man:cargo[1], man:cargo-new[1] diff --git a/src/doc/man/cargo-install.adoc b/src/doc/man/cargo-install.adoc new file mode 100644 index 00000000000..fef7b250d18 --- /dev/null +++ b/src/doc/man/cargo-install.adoc @@ -0,0 +1,149 @@ += cargo-install(1) +:idprefix: cargo_install_ +:doctype: manpage +:actionverb: Install + +== NAME + +cargo-install - Build and install a Rust binary + +== SYNOPSIS + +[%hardbreaks] +`cargo install [_OPTIONS_] _CRATE_...` +`cargo install [_OPTIONS_] --path _PATH_` +`cargo install [_OPTIONS_] --git _URL_ [_CRATE_...]` +`cargo install [_OPTIONS_] --list` + +== DESCRIPTION + +This command manages Cargo's local set of installed binary crates. Only +packages which have executable `\[[bin]]` or `\[[example]]` targets can be +installed, and all executables are installed into the installation root's +`bin` folder. + +include::description-install-root.adoc[] + +There are multiple sources from which a crate can be installed. The default +location is crates.io but the `--git`, `--path`, and `--registry` flags can +change this source. If the source contains more than one package (such as +crates.io or a git repository with multiple crates), the _CRATE_ argument is +required to indicate which crate should be installed. + +For crates from crates.io, the version to install can optionally be specified +with the `--version` flag, and similarly for packages from git repositories +the branch, tag, or revision to install can optionally be specified. If a +crate has multiple binaries, the `--bin` argument can selectively install only +one of them, and if you'd rather install examples, the `--example` argument can +be used as well. + +If the source is crates.io or `--git`, then by default the crate will be built +in a temporary target directory. To avoid this, the target directory can be +specified by setting the `CARGO_TARGET_DIR` environment variable to a relative +path. In particular, this can be useful for caching build artifacts on +continuous integration systems. + +By default, the `Cargo.lock` file that is included with the package will be +ignored. This means that Cargo will recompute which versions of dependencies +to use, possibly using newer versions that have been released since the +package was published. The `--locked` flag can be used to force Cargo to use +the packaged `Cargo.lock` file if it is available. This may be useful for +ensuring reproducible builds, to use the exact same set of dependencies that +were available when the package was published. It may also be useful if a +newer version of a dependency is published that no longer builds on your +system, or has other problems. The downside to using `--locked` is that you +will not receive any fixes or updates to any dependency. Note that Cargo did +not start publishing `Cargo.lock` files until version 1.37, which means +packages published with prior versions will not have a `Cargo.lock` file +available. + +== OPTIONS + +=== Install Options + +*--vers* _VERSION_:: +*--version* _VERSION_:: + Specify a version to install. + +*--git* _URL_:: + Git URL to install the specified crate from. + +*--branch* _BRANCH_:: + Branch to use when installing from git. + +*--tag* _TAG_:: + Tag to use when installing from git. + +*--rev* _SHA_:: + Specific commit to use when installing from git. + +*--path* _PATH_:: + Filesystem path to local crate to install. + +*--list*:: + List all installed packages and their versions.
+ +*-f*:: +*--force*:: + Force overwriting existing crates or binaries. This can be used to + reinstall or upgrade a crate. + +*--bin* _NAME_...:: + Install only the specified binary. + +*--bins*:: + Install all binaries. + +*--example* _NAME_...:: + Install only the specified example. + +*--examples*:: + Install all examples. + +*--root* _DIR_:: + Directory to install packages into. + +include::options-registry.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +*--debug*:: + Build with the `dev` profile instead of the `release` profile. + +=== Manifest Options + +include::options-locked.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Install a package from crates.io: + + cargo install ripgrep + +. Reinstall or upgrade a package: + + cargo install ripgrep --force + +== SEE ALSO +man:cargo[1], man:cargo-uninstall[1], man:cargo-search[1], man:cargo-publish[1] diff --git a/src/doc/man/cargo-locate-project.adoc b/src/doc/man/cargo-locate-project.adoc new file mode 100644 index 00000000000..adfc7a39ebf --- /dev/null +++ b/src/doc/man/cargo-locate-project.adoc @@ -0,0 +1,46 @@ += cargo-locate-project(1) +:idprefix: cargo_locate-project_ +:doctype: manpage + +== NAME + +cargo-locate-project - Print a JSON representation of a Cargo.toml file's location + +== SYNOPSIS + +`cargo locate-project [_OPTIONS_]` + +== DESCRIPTION + +This command will print a JSON object to stdout with the full path to the +`Cargo.toml` manifest. + +See also man:cargo-metadata[1] which is capable of returning the path to a +workspace root. + +== OPTIONS + +=== Display Options + +include::options-display.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Display the path to the manifest based on the current directory: + + cargo locate-project + +== SEE ALSO +man:cargo[1], man:cargo-metadata[1] diff --git a/src/doc/man/cargo-login.adoc b/src/doc/man/cargo-login.adoc new file mode 100644 index 00000000000..17d0e66ad41 --- /dev/null +++ b/src/doc/man/cargo-login.adoc @@ -0,0 +1,51 @@ += cargo-login(1) +:idprefix: cargo_login_ +:doctype: manpage + +== NAME + +cargo-login - Save an API token from the registry locally + +== SYNOPSIS + +`cargo login [_OPTIONS_] [_TOKEN_]` + +== DESCRIPTION + +This command will save the API token to disk so that commands that require +authentication, such as man:cargo-publish[1], will be automatically +authenticated. The token is saved in `$CARGO_HOME/credentials`. `CARGO_HOME` +defaults to `.cargo` in your home directory. + +If the _TOKEN_ argument is not specified, it will be read from stdin. + +The API token for crates.io may be retrieved from https://crates.io/me. + +Take care to keep the token secret; it should not be shared with anyone else. + +== OPTIONS + +=== Login Options + +include::options-registry.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. 
Save the API token to disk: + + cargo login + +== SEE ALSO +man:cargo[1], man:cargo-publish[1] diff --git a/src/doc/man/cargo-metadata.adoc b/src/doc/man/cargo-metadata.adoc new file mode 100644 index 00000000000..cffad736831 --- /dev/null +++ b/src/doc/man/cargo-metadata.adoc @@ -0,0 +1,290 @@ += cargo-metadata(1) +:idprefix: cargo_metadata_ +:doctype: manpage +:source-highlighter: highlightjs + +== NAME + +cargo-metadata - Machine-readable metadata about the current package + +== SYNOPSIS + +`cargo metadata [_OPTIONS_]` + +== DESCRIPTION + +Output the resolved dependencies of a package, the concrete used versions +including overrides, in JSON to stdout. + +It is recommended to include the `--format-version` flag to future-proof +your code to ensure the output is in the format you are expecting. + +See the link:https://crates.io/crates/cargo_metadata[cargo_metadata crate] +for a Rust API for reading the metadata. + +== OUTPUT FORMAT + +The output has the following format: + +[source,javascript] +---- +{ + /* Array of all packages in the workspace. + It also includes all feature-enabled dependencies unless --no-deps is used. + */ + "packages": [ + { + /* The name of the package. */ + "name": "my-package", + /* The version of the package. */ + "version": "0.1.0", + /* The Package ID, a unique identifier for referring to the package. */ + "id": "my-package 0.1.0 (path+file:///path/to/my-package)", + /* The license value from the manifest, or null. */ + "license": "MIT/Apache-2.0", + /* The license-file value from the manifest, or null. */ + "license_file": "LICENSE", + /* The description value from the manifest, or null. */ + "description": "Package description.", + /* The source ID of the package. This represents where + a package is retrieved from. + This is null for path dependencies and workspace members. + For other dependencies, it is a string with the format: + - "registry+URL" for registry-based dependencies. + Example: "registry+https://github.com/rust-lang/crates.io-index" + - "git+URL" for git-based dependencies. + Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" + */ + "source": null, + /* Array of dependencies declared in the package's manifest. */ + "dependencies": [ + { + /* The name of the dependency. */ + "name": "bitflags", + /* The source ID of the dependency. May be null, see + description for the package source. + */ + "source": "registry+https://github.com/rust-lang/crates.io-index", + /* The version requirement for the dependency. + Dependencies without a version requirement have a value of "*". + */ + "req": "^1.0", + /* The dependency kind. + "dev", "build", or null for a normal dependency. + */ + "kind": null, + /* If the dependency is renamed, this is the new name for + the dependency as a string. null if it is not renamed. + */ + "rename": null, + /* Boolean of whether or not this is an optional dependency. */ + "optional": false, + /* Boolean of whether or not default features are enabled. */ + "uses_default_features": true, + /* Array of features enabled. */ + "features": [], + /* The target platform for the dependency. + null if not a target dependency. + */ + "target": "cfg(windows)", + /* A string of the URL of the registry this dependency is from. + If not specified or null, the dependency is from the default + registry (crates.io). + */ + "registry": null + } + ], + /* Array of Cargo targets. */ + "targets": [ + { + /* Array of target kinds. 
+ - lib targets list the `crate-type` values from the + manifest such as "lib", "rlib", "dylib", + "proc-macro", etc. (default ["lib"]) + - binary is ["bin"] + - example is ["example"] + - integration test is ["test"] + - benchmark is ["bench"] + - build script is ["custom-build"] + */ + "kind": [ + "bin" + ], + /* Array of crate types. + - lib and example libraries list the `crate-type` values + from the manifest such as "lib", "rlib", "dylib", + "proc-macro", etc. (default ["lib"]) + - all other target kinds are ["bin"] + */ + "crate_types": [ + "bin" + ], + /* The name of the target. */ + "name": "my-package", + /* Absolute path to the root source file of the target. */ + "src_path": "/path/to/my-package/src/main.rs", + /* The Rust edition of the target. + Defaults to the package edition. + */ + "edition": "2018", + /* Array of required features. + This property is not included if no required features are set. + */ + "required-features": ["feat1"], + /* Whether or not this target has doc tests enabled, and + the target is compatible with doc testing. + */ + "doctest": false + } + ], + /* Set of features defined for the package. + Each feature maps to an array of features or dependencies it + enables. + */ + "features": { + "default": [ + "feat1" + ], + "feat1": [], + "feat2": [] + }, + /* Absolute path to this package's manifest. */ + "manifest_path": "/path/to/my-package/Cargo.toml", + /* Package metadata. + This is null if no metadata is specified. + */ + "metadata": { + "docs": { + "rs": { + "all-features": true + } + } + }, + /* Array of authors from the manifest. + Empty array if no authors specified. + */ + "authors": [ + "Jane Doe " + ], + /* Array of categories from the manifest. */ + "categories": [ + "command-line-utilities" + ], + /* Array of keywords from the manifest. */ + "keywords": [ + "cli" + ], + /* The readme value from the manifest or null if not specified. */ + "readme": "README.md", + /* The repository value from the manifest or null if not specified. */ + "repository": "https://github.com/rust-lang/cargo", + /* The default edition of the package. + Note that individual targets may have different editions. + */ + "edition": "2018", + /* Optional string that is the name of a native library the package + is linking to. + */ + "links": null, + } + ], + /* Array of members of the workspace. + Each entry is the Package ID for the package. + */ + "workspace_members": [ + "my-package 0.1.0 (path+file:///path/to/my-package)", + ], + /* The resolved dependency graph, with the concrete versions and features + selected. The set depends on the enabled features. + This is null if --no-deps is specified. + */ + "resolve": { + /* Array of nodes within the dependency graph. + Each node is a package. + */ + "nodes": [ + { + /* The Package ID of this node. */ + "id": "my-package 0.1.0 (path+file:///path/to/my-package)", + /* The dependencies of this package, an array of Package IDs. */ + "dependencies": [ + "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" + ], + /* The dependencies of this package. This is an alternative to + "dependencies" which contains additional information. In + particular, this handles renamed dependencies. + */ + "deps": [ + { + /* The name of the dependency's library target. + If this is a renamed dependency, this is the new + name. + */ + "name": "bitflags", + /* The Package ID of the dependency. 
*/ + "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" + } + ], + /* Array of features enabled on this package. */ + "features": [ + "default" + ] + } + ], + /* The root package of the workspace. + This is null if this is a virtual workspace. Otherwise it is + the Package ID of the root package. + */ + "root": "my-package 0.1.0 (path+file:///path/to/my-package)" + }, + /* The absolute path to the build directory where Cargo places its output. */ + "target_directory": "/path/to/my-package/target", + /* The version of the schema for this metadata structure. + This will be changed if incompatible changes are ever made. + */ + "version": 1, + /* The absolute path to the root of the workspace. */ + "workspace_root": "/path/to/my-package" +} +---- + +== OPTIONS + +=== Output Options + +*--no-deps*:: + Output information only about the workspace members and don't fetch + dependencies. + +*--format-version* _VERSION_:: + Specify the version of the output format to use. Currently `1` is the only + possible value. + +include::options-features.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Output JSON about the current package: + + cargo metadata --format-version=1 + +== SEE ALSO +man:cargo[1] diff --git a/src/doc/man/cargo-new.adoc b/src/doc/man/cargo-new.adoc new file mode 100644 index 00000000000..6587a378960 --- /dev/null +++ b/src/doc/man/cargo-new.adoc @@ -0,0 +1,50 @@ += cargo-new(1) +:idprefix: cargo_new_ +:doctype: manpage + +== NAME + +cargo-new - Create a new Cargo package + +== SYNOPSIS + +`cargo new [_OPTIONS_] _PATH_` + +== DESCRIPTION + +This command will create a new Cargo package in the given directory. This +includes a simple template with a `Cargo.toml` manifest, sample source file, +and a VCS ignore file. If the directory is not already in a VCS repository, +then a new repository is created (see `--vcs` below). + +include::description-new-authors.adoc[] + +See man:cargo-init[1] for a similar command which will create a new manifest +in an existing directory. + +== OPTIONS + +=== New Options + +include::options-new.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Create a binary Cargo package in the given directory: + + cargo new foo + +== SEE ALSO +man:cargo[1], man:cargo-init[1] diff --git a/src/doc/man/cargo-owner.adoc b/src/doc/man/cargo-owner.adoc new file mode 100644 index 00000000000..63e6e309d1b --- /dev/null +++ b/src/doc/man/cargo-owner.adoc @@ -0,0 +1,80 @@ += cargo-owner(1) +:idprefix: cargo_owner_ +:doctype: manpage + +== NAME + +cargo-owner - Manage the owners of a crate on the registry + +== SYNOPSIS + +[%hardbreaks] +`cargo owner [_OPTIONS_] --add _LOGIN_ [_CRATE_]` +`cargo owner [_OPTIONS_] --remove _LOGIN_ [_CRATE_]` +`cargo owner [_OPTIONS_] --list [_CRATE_]` + +== DESCRIPTION + +This command will modify the owners for a crate on the registry. Owners of a +crate can upload new versions and yank old versions. Non-team owners can also +modify the set of owners, so take care! + +This command requires you to be authenticated with either the `--token` option +or using man:cargo-login[1]. 
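+ +For example, a typical flow is to log in once and then modify the owners +(`username` is a placeholder here): + + cargo login + cargo owner --add username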
+ +If the crate name is not specified, it will use the package name from the +current directory. + +See linkcargo:reference/publishing.html#cargo-owner[the reference] for more +information about owners and publishing. + +== OPTIONS + +=== Owner Options + +*-a*:: +*--add* _LOGIN_...:: + Invite the given user or team as an owner. + +*-r*:: +*--remove* _LOGIN_...:: + Remove the given user or team as an owner. + +*-l*:: +*--list*:: + List owners of a crate. + +include::options-token.adoc[] + +include::options-index.adoc[] + +include::options-registry.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. List owners of a package: + + cargo owner --list foo + +. Invite an owner to a package: + + cargo owner --add username foo + +. Remove an owner from a package: + + cargo owner --remove username foo + +== SEE ALSO +man:cargo[1], man:cargo-login[1], man:cargo-publish[1] diff --git a/src/doc/man/cargo-package.adoc b/src/doc/man/cargo-package.adoc new file mode 100644 index 00000000000..0c1b5ac97c6 --- /dev/null +++ b/src/doc/man/cargo-package.adoc @@ -0,0 +1,98 @@ += cargo-package(1) +:idprefix: cargo_package_ +:doctype: manpage +:actionverb: Package + +== NAME + +cargo-package - Assemble the local package into a distributable tarball + +== SYNOPSIS + +`cargo package [_OPTIONS_]` + +== DESCRIPTION + +This command will create a distributable, compressed `.crate` file with the +source code of the package in the current directory. The resulting file will +be stored in the `target/package` directory. This performs the following +steps: + +. Load and check the current workspace, performing some basic checks. + - Path dependencies are not allowed unless they have a version key. Cargo + will ignore the path key for dependencies in published packages. +. Create the compressed `.crate` file. + - The original `Cargo.toml` file is rewritten and normalized. + - `[patch]`, `[replace]`, and `[workspace]` sections are removed from the + manifest. + - `Cargo.lock` is automatically included if the package contains an + executable binary or example target. man:cargo-install[1] will use the + packaged lock file if the `--locked` flag is used. + - A `.cargo_vcs_info.json` file is included that contains information + about the current VCS checkout hash if available (not included with + `--allow-dirty`). +. Extract the `.crate` file and build it to verify it can build. +. Check that build scripts did not modify any source files. + +The list of files included can be controlled with the `include` and `exclude` +fields in the manifest. + +See linkcargo:reference/publishing.html[the reference] for more details about +packaging and publishing. + +== OPTIONS + +=== Package Options + +*-l*:: +*--list*:: + Print files included in a package without making one. + +*--no-verify*:: + Don't verify the contents by building them. + +*--no-metadata*:: + Ignore warnings about a lack of human-usable metadata (such as the + description or the license). + +*--allow-dirty*:: + Allow working directories with uncommitted VCS changes to be packaged. 
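+ +For example, to preview the files that would be included and then create the +`.crate` file while skipping the verification build (combining the flags +described above): + + cargo package --list + cargo package --no-verify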
+ +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-target-dir.adoc[] + +include::options-features.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Create a compressed `.crate` file of the current package: + + cargo package + +== SEE ALSO +man:cargo[1], man:cargo-publish[1] diff --git a/src/doc/man/cargo-pkgid.adoc b/src/doc/man/cargo-pkgid.adoc new file mode 100644 index 00000000000..98ff9dd9d89 --- /dev/null +++ b/src/doc/man/cargo-pkgid.adoc @@ -0,0 +1,94 @@ += cargo-pkgid(1) +:idprefix: cargo_pkgid_ +:doctype: manpage + +== NAME + +cargo-pkgid - Print a fully qualified package specification + +== SYNOPSIS + +`cargo pkgid [_OPTIONS_] [_SPEC_]` + +== DESCRIPTION + +Given a _SPEC_ argument, print out the fully qualified package ID specifier +for a package or dependency in the current workspace. This command will +generate an error if _SPEC_ is ambiguous as to which package it refers to in +the dependency graph. If no _SPEC_ is given, then the specifier for the local +package is printed. + +This command requires that a lockfile is available and dependencies have been +fetched. + +A package specifier consists of a name, version, and source URL. You are +allowed to use partial specifiers to succinctly match a specific package as +long as it matches only one package. The format of a _SPEC_ can be one of the +following: + +[%autowidth] +.SPEC Query Format +|=== +|SPEC Structure |Example SPEC + +|__NAME__ +|`bitflags` + +|__NAME__``:``__VERSION__ +|`bitflags:1.0.4` + +|__URL__ +|`https://github.com/rust-lang/cargo` + +|__URL__``#``__VERSION__ +|`https://github.com/rust-lang/cargo#0.33.0` + +|__URL__``#``__NAME__ +|`https://github.com/rust-lang/crates.io-index#bitflags` + +|__URL__``#``__NAME__``:``__VERSION__ +|`https://github.com/rust-lang/cargo#crates-io:0.21.0` +|=== + +== OPTIONS + +=== Package Selection + +*-p* _SPEC_:: +*--package* _SPEC_:: + Get the package ID for the given package instead of the current package. + +=== Display Options + +include::options-display.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Retrieve package specification for `foo` package: + + cargo pkgid foo + +. Retrieve package specification for version 1.0.0 of `foo`: + + cargo pkgid foo:1.0.0 + +. 
Retrieve package specification for `foo` from crates.io: + + cargo pkgid https://github.com/rust-lang/crates.io-index#foo + +== SEE ALSO +man:cargo[1], man:cargo-generate-lockfile[1], man:cargo-metadata[1] diff --git a/src/doc/man/cargo-publish.adoc b/src/doc/man/cargo-publish.adoc new file mode 100644 index 00000000000..f63c38ba628 --- /dev/null +++ b/src/doc/man/cargo-publish.adoc @@ -0,0 +1,90 @@ += cargo-publish(1) +:idprefix: cargo_publish_ +:doctype: manpage +:actionverb: Publish + +== NAME + +cargo-publish - Upload a package to the registry + +== SYNOPSIS + +`cargo publish [_OPTIONS_]` + +== DESCRIPTION + +This command will create a distributable, compressed `.crate` file with the +source code of the package in the current directory and upload it to a +registry. The default registry is https://crates.io. This performs the +following steps: + +. Performs a few checks, including: + - Checks the `package.publish` key in the manifest for restrictions on which + registries you are allowed to publish to. +. Create a `.crate` file by following the steps in man:cargo-package[1]. +. Upload the crate to the registry. Note that the server will perform + additional checks on the crate. + +This command requires you to be authenticated with either the `--token` option +or using man:cargo-login[1]. + +See linkcargo:reference/publishing.html[the reference] for more details about +packaging and publishing. + +== OPTIONS + +=== Publish Options + +*--dry-run*:: + Perform all checks without uploading. + +include::options-token.adoc[] + +*--no-verify*:: + Don't verify the contents by building them. + +*--allow-dirty*:: + Allow working directories with uncommitted VCS changes to be packaged. + +include::options-index.adoc[] + +include::options-registry.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-target-dir.adoc[] + +include::options-features.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Publish the current package: + + cargo publish + +== SEE ALSO +man:cargo[1], man:cargo-package[1], man:cargo-login[1] diff --git a/src/doc/man/cargo-run.adoc b/src/doc/man/cargo-run.adoc new file mode 100644 index 00000000000..8aa64e75776 --- /dev/null +++ b/src/doc/man/cargo-run.adoc @@ -0,0 +1,90 @@ += cargo-run(1) +:idprefix: cargo_run_ +:doctype: manpage +:actionverb: Run + +== NAME + +cargo-run - Run the current package + +== SYNOPSIS + +`cargo run [_OPTIONS_] [-- _ARGS_]` + +== DESCRIPTION + +Run a binary or example of the local package. + +All the arguments following the two dashes (`--`) are passed to the binary to +run. If you're passing arguments to both Cargo and the binary, the ones after +`--` go to the binary, the ones before go to Cargo. + +== OPTIONS + +=== Package Selection + +include::options-package.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo run` will run the binary +target. If there are multiple binary targets, you must pass a target flag to +choose one. Or, the `default-run` field may be specified in the `[package]` +section of `Cargo.toml` to choose the name of the binary to run by default. + +*--bin* _NAME_:: + Run the specified binary. 
+ +*--example* _NAME_:: + Run the specified example. + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Build the local package and run its main target (assuming only one binary): + + cargo run + +. Run an example with extra arguments: + + cargo run --example exname -- --exoption exarg1 exarg2 + +== SEE ALSO +man:cargo[1], man:cargo-build[1] diff --git a/src/doc/man/cargo-rustc.adoc b/src/doc/man/cargo-rustc.adoc new file mode 100644 index 00000000000..7ba60cb19e1 --- /dev/null +++ b/src/doc/man/cargo-rustc.adoc @@ -0,0 +1,94 @@ += cargo-rustc(1) +:idprefix: cargo_rustc_ +:doctype: manpage +:actionverb: Build + +== NAME + +cargo-rustc - Compile the current package, and pass extra options to the compiler + +== SYNOPSIS + +`cargo rustc [_OPTIONS_] [-- _ARGS_]` + +== DESCRIPTION + +The specified target for the current package (or package specified by `-p` if +provided) will be compiled along with all of its dependencies. The specified +_ARGS_ will all be passed to the final compiler invocation, not any of the +dependencies. Note that the compiler will still unconditionally receive +arguments such as `-L`, `--extern`, and `--crate-type`, and the specified +_ARGS_ will simply be added to the compiler invocation. + +See https://doc.rust-lang.org/rustc/index.html for documentation on rustc +flags. + +include::description-one-target.adoc[] +To pass flags to all compiler processes spawned by Cargo, use the `RUSTFLAGS` +environment variable or the `build.rustflags` +linkcargo:reference/config.html[config value]. + +== OPTIONS + +=== Package Selection + +include::options-package.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo rustc` will build all +binary and library targets of the selected package. + +include::options-targets.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Check if your package (not including dependencies) uses unsafe code: + + cargo rustc --lib -- -D unsafe-code + +. 
Try an experimental flag on the nightly compiler, such as this one, which prints + the size of every type: + + cargo rustc --lib -- -Z print-type-sizes + +== SEE ALSO +man:cargo[1], man:cargo-build[1], man:rustc[1] diff --git a/src/doc/man/cargo-rustdoc.adoc b/src/doc/man/cargo-rustdoc.adoc new file mode 100644 index 00000000000..6d7fea76141 --- /dev/null +++ b/src/doc/man/cargo-rustdoc.adoc @@ -0,0 +1,96 @@ += cargo-rustdoc(1) +:idprefix: cargo_rustdoc_ +:doctype: manpage +:actionverb: Document + +== NAME + +cargo-rustdoc - Build a package's documentation, using specified custom flags + +== SYNOPSIS + +`cargo rustdoc [_OPTIONS_] [-- _ARGS_]` + +== DESCRIPTION + +The specified target for the current package (or package specified by `-p` if +provided) will be documented with the specified _ARGS_ being passed to the +final rustdoc invocation. Dependencies will not be documented as part of this +command. Note that rustdoc will still unconditionally receive arguments such +as `-L`, `--extern`, and `--crate-type`, and the specified _ARGS_ will simply +be added to the rustdoc invocation. + +See https://doc.rust-lang.org/rustdoc/index.html for documentation on rustdoc +flags. + +include::description-one-target.adoc[] +To pass flags to all rustdoc processes spawned by Cargo, use the +`RUSTDOCFLAGS` environment variable or the `build.rustdocflags` configuration +option. + +== OPTIONS + +=== Documentation Options + +*--open*:: + Open the docs in a browser after building them. + +=== Package Selection + +include::options-package.adoc[] + +=== Target Selection + +When no target selection options are given, `cargo rustdoc` will document all +binary and library targets of the selected package. The binary will be skipped +if its name is the same as the lib target. Binaries are skipped if they have +`required-features` that are missing. + +include::options-targets.adoc[] + +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Build documentation with custom CSS included from a given file: + + cargo rustdoc --lib -- --extend-css extra.css + +== SEE ALSO +man:cargo[1], man:cargo-doc[1], man:rustdoc[1] diff --git a/src/doc/man/cargo-search.adoc b/src/doc/man/cargo-search.adoc new file mode 100644 index 00000000000..4d51285928f --- /dev/null +++ b/src/doc/man/cargo-search.adoc @@ -0,0 +1,49 @@ += cargo-search(1) +:idprefix: cargo_search_ +:doctype: manpage + +== NAME + +cargo-search - Search packages in crates.io + +== SYNOPSIS + +`cargo search [_OPTIONS_] [_QUERY_...]` + +== DESCRIPTION + +This performs a textual search for crates on https://crates.io. The matching +crates will be displayed along with their description in TOML format suitable +for copying into a `Cargo.toml` manifest. + +== OPTIONS + +=== Search Options + +*--limit* _LIMIT_:: + Limit the number of results (default: 10, max: 100).
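+ +For example, to show up to 30 matching crates for a query (using the flag +above; `serde` is just an example query): + + cargo search --limit 30 serde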
+
+include::options-index.adoc[]
+
+include::options-registry.adoc[]
+
+=== Display Options
+
+include::options-display.adoc[]
+
+=== Common Options
+
+include::options-common.adoc[]
+
+include::section-environment.adoc[]
+
+include::section-exit-status.adoc[]
+
+== EXAMPLES
+
+. Search for a package from crates.io:
+
+    cargo search serde
+
+== SEE ALSO
+man:cargo[1], man:cargo-install[1], man:cargo-publish[1]
diff --git a/src/doc/man/cargo-test.adoc b/src/doc/man/cargo-test.adoc
new file mode 100644
index 00000000000..7f198526572
--- /dev/null
+++ b/src/doc/man/cargo-test.adoc
@@ -0,0 +1,152 @@
+= cargo-test(1)
+:idprefix: cargo_test_
+:doctype: manpage
+:actionverb: Test
+:nouns: tests
+
+== NAME
+
+cargo-test - Execute unit and integration tests of a package
+
+== SYNOPSIS
+
+`cargo test [_OPTIONS_] [TESTNAME] [-- _TEST-OPTIONS_]`
+
+== DESCRIPTION
+
+Compile and execute unit and integration tests.
+
+The test filtering argument `TESTNAME` and all the arguments following the two
+dashes (`--`) are passed to the test binaries and thus to _libtest_ (rustc's
+built-in unit-test and micro-benchmarking framework). If you're passing
+arguments to both Cargo and the binary, the ones after `--` go to the binary,
+while the ones before go to Cargo. For details about libtest's arguments see
+the output of `cargo test -- --help`. As an example, this will run all tests
+with `foo` in their name on 3 threads in parallel:
+
+    cargo test foo -- --test-threads 3
+
+Tests are built with the `--test` option to `rustc` which creates an
+executable with a `main` function that automatically runs all functions
+annotated with the `\#[test]` attribute in multiple threads. `#[bench]`
+annotated functions will also be run with one iteration to verify that they
+are functional.
+
+The libtest harness may be disabled by setting `harness = false` in the target
+manifest settings, in which case your code will need to provide its own `main`
+function to handle running tests.
+
+Documentation tests are also run by default, which is handled by `rustdoc`. It
+extracts code samples from documentation comments and executes them. See the
+link:https://doc.rust-lang.org/rustdoc/[rustdoc book] for more information on
+writing doc tests.
+
+== OPTIONS
+
+=== Test Options
+
+include::options-test.adoc[]
+
+=== Package Selection
+
+include::options-packages.adoc[]
+
+=== Target Selection
+
+When no target selection options are given, `cargo test` will build the
+following targets of the selected packages:
+
+- lib — used to link with binaries, examples, integration tests, and doc tests
+- bins (only if integration tests are built and required features are
+  available)
+- examples — to ensure they compile
+- lib as a unit test
+- bins as unit tests
+- integration tests
+- doc tests for the lib target
+
+The default behavior can be changed by setting the `test` flag for the target
+in the manifest settings. Setting examples to `test = true` will build and run
+the example as a test. Setting targets to `test = false` will stop them from
+being tested by default. Target selection options that take a target by name
+ignore the `test` flag and will always test the given target.
+
+Doc tests for libraries may be disabled by setting `doctest = false` for the
+library in the manifest. An illustrative sketch of these manifest settings
+follows the target options below.
+
+include::options-targets.adoc[]
+
+*--doc*::
+    Test only the library's documentation. This cannot be mixed with other
+    target options.
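+
+As an illustrative sketch (the target names here are hypothetical, not part
+of any particular package), the manifest settings mentioned above might look
+like this in `Cargo.toml`:
+
+    [lib]
+    doctest = false        # skip doc tests for the library
+
+    [[bin]]
+    name = "cli"           # hypothetical binary target
+    test = false           # do not run this binary's unit tests by default
+
+    [[example]]
+    name = "demo"          # hypothetical example target
+    test = true            # build and run this example as a test
+
+    [[test]]
+    name = "integration"   # hypothetical tests/integration.rs
+    harness = false        # the file must provide its own main function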
+ +include::options-features.adoc[] + +=== Compilation Options + +include::options-target-triple.adoc[] + +include::options-release.adoc[] + +=== Output Options + +include::options-target-dir.adoc[] + +=== Display Options + +By default the Rust test harness hides output from test execution to keep +results readable. Test output can be recovered (e.g., for debugging) by passing +`--nocapture` to the test binaries: + + cargo test -- --nocapture + +include::options-display.adoc[] + +include::options-message-format.adoc[] + +=== Manifest Options + +include::options-manifest-path.adoc[] + +include::options-locked.adoc[] + +=== Common Options + +include::options-common.adoc[] + +=== Miscellaneous Options + +The `--jobs` argument affects the building of the test executable but does not +affect how many threads are used when running the tests. The Rust test harness +includes an option to control the number of threads used: + + cargo test -j 2 -- --test-threads=2 + +include::options-jobs.adoc[] + +include::section-profiles.adoc[] + +Unit tests are separate executable artifacts which use the `test`/`bench` +profiles. Example targets are built the same as with `cargo build` (using the +`dev`/`release` profiles) unless you are building them with the test harness +(by setting `test = true` in the manifest or using the `--example` flag) in +which case they use the `test`/`bench` profiles. Library targets are built +with the `dev`/`release` profiles when linked to an integration test, binary, +or doctest. + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Execute all the unit and integration tests of the current package: + + cargo test + +. Run only a specific test within a specific integration test: + + cargo test --test int_test_name -- modname::test_name + +== SEE ALSO +man:cargo[1], man:cargo-bench[1] diff --git a/src/doc/man/cargo-uninstall.adoc b/src/doc/man/cargo-uninstall.adoc new file mode 100644 index 00000000000..b75a10401e2 --- /dev/null +++ b/src/doc/man/cargo-uninstall.adoc @@ -0,0 +1,57 @@ += cargo-uninstall(1) +:idprefix: cargo_uninstall_ +:doctype: manpage + +== NAME + +cargo-uninstall - Remove a Rust binary + +== SYNOPSIS + +`cargo uninstall [_OPTIONS_] [_SPEC_...]` + +== DESCRIPTION + +This command removes a package installed with man:cargo-install[1]. The _SPEC_ +argument is a package ID specification of the package to remove (see +man:cargo-pkgid[1]). + +By default all binaries are removed for a crate but the `--bin` and +`--example` flags can be used to only remove particular binaries. + +include::description-install-root.adoc[] + +== OPTIONS + +=== Install Options + +*-p*:: +*--package* _SPEC_...:: + Package to uninstall. + +*--bin* _NAME_...:: + Only uninstall the binary _NAME_. + +*--root* _DIR_:: + Directory to uninstall packages from. + +=== Display Options + +include::options-display.adoc[] + +=== Common Options + +include::options-common.adoc[] + +include::section-environment.adoc[] + +include::section-exit-status.adoc[] + +== EXAMPLES + +. Uninstall a previously installed package. 
+
+    cargo uninstall ripgrep
+
+== SEE ALSO
+man:cargo[1], man:cargo-install[1]
diff --git a/src/doc/man/cargo-update.adoc b/src/doc/man/cargo-update.adoc
new file mode 100644
index 00000000000..c8a5274350a
--- /dev/null
+++ b/src/doc/man/cargo-update.adoc
@@ -0,0 +1,81 @@
+= cargo-update(1)
+:idprefix: cargo_update_
+:doctype: manpage
+
+== NAME
+
+cargo-update - Update dependencies as recorded in the local lock file
+
+== SYNOPSIS
+
+`cargo update [_OPTIONS_]`
+
+== DESCRIPTION
+
+This command will update dependencies in the `Cargo.lock` file to the latest
+version. It requires that the `Cargo.lock` file already exists, as generated
+by commands such as man:cargo-build[1] or man:cargo-generate-lockfile[1].
+
+== OPTIONS
+
+=== Update Options
+
+*-p* _SPEC_...::
+*--package* _SPEC_...::
+    Update only the specified packages. This flag may be specified
+    multiple times. See man:cargo-pkgid[1] for the SPEC format.
++
+If packages are specified with the `-p` flag, then a conservative update of
+the lockfile will be performed. This means that only the dependency specified
+by SPEC will be updated. Its transitive dependencies will be updated only if
+SPEC cannot be updated without updating dependencies. All other dependencies
+will remain locked at their currently recorded versions.
++
+If `-p` is not specified, all dependencies are updated.
+
+*--aggressive*::
+    When used with `-p`, dependencies of _SPEC_ are forced to update as well.
+    Cannot be used with `--precise`.
+
+*--precise* _PRECISE_::
+    When used with `-p`, allows you to specify a specific version number to
+    set the package to. If the package comes from a git repository, this can
+    be a git revision (such as a SHA hash or tag).
+
+*--dry-run*::
+    Displays what would be updated, but doesn't actually write the lockfile.
+
+=== Display Options
+
+include::options-display.adoc[]
+
+=== Manifest Options
+
+include::options-manifest-path.adoc[]
+
+include::options-locked.adoc[]
+
+=== Common Options
+
+include::options-common.adoc[]
+
+include::section-environment.adoc[]
+
+include::section-exit-status.adoc[]
+
+== EXAMPLES
+
+. Update all dependencies in the lockfile:
+
+    cargo update
+
+. Update only specific dependencies:
+
+    cargo update -p foo -p bar
+
+. Set a specific dependency to a specific version:
+
+    cargo update -p foo --precise 1.2.3
+
+== SEE ALSO
+man:cargo[1], man:cargo-generate-lockfile[1]
diff --git a/src/doc/man/cargo-vendor.adoc b/src/doc/man/cargo-vendor.adoc
new file mode 100644
index 00000000000..7f52ec815d8
--- /dev/null
+++ b/src/doc/man/cargo-vendor.adoc
@@ -0,0 +1,75 @@
+= cargo-vendor(1)
+:idprefix: cargo_vendor_
+:doctype: manpage
+
+== NAME
+
+cargo-vendor - Vendor all dependencies locally
+
+== SYNOPSIS
+
+`cargo vendor [_OPTIONS_] [_PATH_]`
+
+== DESCRIPTION
+
+This cargo subcommand will vendor all crates.io and git dependencies for a
+project into the specified directory at _PATH_. After this command completes,
+the vendor directory specified by _PATH_ will contain all remote sources from
+the specified dependencies. Additional manifests beyond the default one can
+be specified with the `-s` option.
+
+The `cargo vendor` command will also print out the configuration necessary
+to use the vendored sources, which you will need to add to `.cargo/config`.
+
+== OPTIONS
+
+=== Vendor Options
+
+*-s* _MANIFEST_::
+*--sync* _MANIFEST_::
+    Specify extra `Cargo.toml` manifests of workspaces which should also be
+    vendored and synced to the output.
+
+*--no-delete*::
+    Don't delete the "vendor" directory when vendoring, but rather keep all
+    existing contents of the vendor directory.
+
+*--respect-source-config*::
+    Instead of ignoring `[source]` configuration by default in
+    `.cargo/config`, read it and use it when downloading crates from
+    crates.io, for example.
+
+=== Manifest Options
+
+include::options-manifest-path.adoc[]
+
+=== Display Options
+
+include::options-display.adoc[]
+
+=== Common Options
+
+include::options-common.adoc[]
+
+include::options-locked.adoc[]
+
+include::section-environment.adoc[]
+
+include::section-exit-status.adoc[]
+
+== EXAMPLES
+
+. Vendor all dependencies into a local "vendor" folder:
+
+    cargo vendor
+
+. Vendor all dependencies into a local "third-party/vendor" folder:
+
+    cargo vendor third-party/vendor
+
+. Vendor the current workspace as well as another to "vendor":
+
+    cargo vendor -s ../path/to/Cargo.toml
+
+== SEE ALSO
+man:cargo[1]
+
diff --git a/src/doc/man/cargo-verify-project.adoc b/src/doc/man/cargo-verify-project.adoc
new file mode 100644
index 00000000000..7b963f8c5dd
--- /dev/null
+++ b/src/doc/man/cargo-verify-project.adoc
@@ -0,0 +1,57 @@
+= cargo-verify-project(1)
+:idprefix: cargo_verify-project_
+:doctype: manpage
+
+== NAME
+
+cargo-verify-project - Check correctness of crate manifest
+
+== SYNOPSIS
+
+`cargo verify-project [_OPTIONS_]`
+
+== DESCRIPTION
+
+This command will parse the local manifest and check its validity. It emits a
+JSON object with the result. A successful validation will display:
+
+    {"success":"true"}
+
+An invalid workspace will display:
+
+    {"invalid":"human-readable error message"}
+
+== OPTIONS
+
+=== Display Options
+
+include::options-display.adoc[]
+
+=== Manifest Options
+
+include::options-manifest-path.adoc[]
+
+include::options-locked.adoc[]
+
+=== Common Options
+
+include::options-common.adoc[]
+
+include::section-environment.adoc[]
+
+== Exit Status
+
+0::
+    The workspace is OK.
+
+1::
+    The workspace is invalid.
+
+== EXAMPLES
+
+. Check the current workspace for errors:
+
+    cargo verify-project
+
+== SEE ALSO
+man:cargo[1], man:cargo-package[1]
diff --git a/src/doc/man/cargo-version.adoc b/src/doc/man/cargo-version.adoc
new file mode 100644
index 00000000000..4c3bb7a1be3
--- /dev/null
+++ b/src/doc/man/cargo-version.adoc
@@ -0,0 +1,39 @@
+= cargo-version(1)
+:idprefix: cargo_version_
+:doctype: manpage
+
+== NAME
+
+cargo-version - Show version information
+
+== SYNOPSIS
+
+`cargo version [_OPTIONS_]`
+
+== DESCRIPTION
+
+Displays the version of Cargo.
+
+== OPTIONS
+
+*-v*::
+*--verbose*::
+    Display additional version information.
+
+== EXAMPLES
+
+. Display the version:
+
+    cargo version
+
+. The version is also available via flags:
+
+    cargo --version
+    cargo -V
+
+. Display extra version information:
+
+    cargo -Vv
+
+== SEE ALSO
+man:cargo[1]
diff --git a/src/doc/man/cargo-yank.adoc b/src/doc/man/cargo-yank.adoc
new file mode 100644
index 00000000000..99f430826f7
--- /dev/null
+++ b/src/doc/man/cargo-yank.adoc
@@ -0,0 +1,64 @@
+= cargo-yank(1)
+:idprefix: cargo_yank_
+:doctype: manpage
+
+== NAME
+
+cargo-yank - Remove a pushed crate from the index
+
+== SYNOPSIS
+
+`cargo yank [_OPTIONS_] --vers _VERSION_ [_CRATE_]`
+
+== DESCRIPTION
+
+The yank command removes a previously published crate's version from the
+server's index. This command does not delete any data, and the crate will
+still be available for download via the registry's download link.
+
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version and use it. Cargo will, however, not allow any
+new crates to be locked to any yanked version.
+
+This command requires you to be authenticated with either the `--token`
+option or using man:cargo-login[1].
+
+If the crate name is not specified, it will use the package name from the
+current directory.
+
+== OPTIONS
+
+=== Yank Options
+
+*--vers* _VERSION_::
+    The version to yank or un-yank.
+
+*--undo*::
+    Undo a yank, putting a version back into the index.
+
+include::options-token.adoc[]
+
+include::options-index.adoc[]
+
+include::options-registry.adoc[]
+
+=== Display Options
+
+include::options-display.adoc[]
+
+=== Common Options
+
+include::options-common.adoc[]
+
+include::section-environment.adoc[]
+
+include::section-exit-status.adoc[]
+
+== EXAMPLES
+
+. Yank a crate from the index:
+
+    cargo yank --vers 1.0.7 foo
+
+== SEE ALSO
+man:cargo[1], man:cargo-login[1], man:cargo-publish[1]
diff --git a/src/doc/man/cargo.adoc b/src/doc/man/cargo.adoc
new file mode 100644
index 00000000000..133b04b9a6e
--- /dev/null
+++ b/src/doc/man/cargo.adoc
@@ -0,0 +1,217 @@
+= cargo(1)
+:doctype: manpage
+
+== NAME
+
+cargo - The Rust package manager
+
+== SYNOPSIS
+
+[%hardbreaks]
+`cargo [_OPTIONS_] _COMMAND_ [_ARGS_]`
+`cargo [_OPTIONS_] --version`
+`cargo [_OPTIONS_] --list`
+`cargo [_OPTIONS_] --help`
+`cargo [_OPTIONS_] --explain _CODE_`
+
+== DESCRIPTION
+
+This program is a package manager and build tool for the Rust language,
+available at https://rust-lang.org.
+
+== COMMANDS
+
+=== Build Commands
+
+man:cargo-bench[1]::
+    Execute benchmarks of a package.
+
+man:cargo-build[1]::
+    Compile a package.
+
+man:cargo-check[1]::
+    Check a local package and all of its dependencies for errors.
+
+man:cargo-clean[1]::
+    Remove artifacts that Cargo has generated in the past.
+
+man:cargo-doc[1]::
+    Build a package's documentation.
+
+man:cargo-fetch[1]::
+    Fetch dependencies of a package from the network.
+
+man:cargo-fix[1]::
+    Automatically fix lint warnings reported by rustc.
+
+man:cargo-run[1]::
+    Run a binary or example of the local package.
+
+man:cargo-rustc[1]::
+    Compile a package, and pass extra options to the compiler.
+
+man:cargo-rustdoc[1]::
+    Build a package's documentation, using specified custom flags.
+
+man:cargo-test[1]::
+    Execute unit and integration tests of a package.
+
+=== Manifest Commands
+
+man:cargo-generate-lockfile[1]::
+    Generate `Cargo.lock` for a project.
+
+man:cargo-locate-project[1]::
+    Print a JSON representation of a `Cargo.toml` file's location.
+
+man:cargo-metadata[1]::
+    Output the resolved dependencies of a package in machine-readable
+    format, including the concrete versions used and any overrides.
+
+man:cargo-pkgid[1]::
+    Print a fully qualified package specification.
+
+man:cargo-update[1]::
+    Update dependencies as recorded in the local lock file.
+
+man:cargo-verify-project[1]::
+    Check correctness of crate manifest.
+
+=== Package Commands
+
+man:cargo-init[1]::
+    Create a new Cargo package in an existing directory.
+
+man:cargo-install[1]::
+    Build and install a Rust binary.
+
+man:cargo-new[1]::
+    Create a new Cargo package.
+
+man:cargo-search[1]::
+    Search packages in crates.io.
+
+man:cargo-uninstall[1]::
+    Remove a Rust binary.
+
+=== Publishing Commands
+
+man:cargo-login[1]::
+    Save an API token from the registry locally.
+
+man:cargo-owner[1]::
+    Manage the owners of a crate on the registry.
+
+man:cargo-package[1]::
+    Assemble the local package into a distributable tarball.
+
+man:cargo-publish[1]::
+    Upload a package to the registry.
+
+man:cargo-yank[1]::
+    Remove a pushed crate from the index.
+
+=== General Commands
+
+man:cargo-help[1]::
+    Display help information about Cargo.
+
+man:cargo-version[1]::
+    Show version information.
+
+== OPTIONS
+
+=== Special Options
+
+*-V*::
+*--version*::
+    Print version info and exit. If used with `--verbose`, prints extra
+    information.
+
+*--list*::
+    List all installed Cargo subcommands. If used with `--verbose`, prints
+    extra information.
+
+*--explain* _CODE_::
+    Run `rustc --explain CODE`, which will print out a detailed explanation
+    of an error message (for example, `E0004`).
+
+=== Display Options
+
+include::options-display.adoc[]
+
+=== Manifest Options
+
+include::options-locked.adoc[]
+
+=== Common Options
+
+include::options-common.adoc[]
+
+include::section-environment.adoc[]
+
+include::section-exit-status.adoc[]
+
+== FILES
+
+`~/.cargo/`::
+    Default location for Cargo's "home" directory where it stores various
+    files. The location can be changed with the `CARGO_HOME` environment
+    variable.
+
+`$CARGO_HOME/bin/`::
+    Binaries installed by man:cargo-install[1] will be located here. If using
+    rustup, executables distributed with Rust are also located here.
+
+`$CARGO_HOME/config`::
+    The global configuration file. See linkcargo:reference/config.html[the reference]
+    for more information about configuration files.
+
+`.cargo/config`::
+    Cargo automatically searches for a file named `.cargo/config` in the
+    current directory, and all parent directories. These configuration files
+    will be merged with the global configuration file.
+
+`$CARGO_HOME/credentials`::
+    Private authentication information for logging in to a registry.
+
+`$CARGO_HOME/registry/`::
+    This directory contains cached downloads of the registry index and any
+    downloaded dependencies.
+
+`$CARGO_HOME/git/`::
+    This directory contains cached downloads of git dependencies.
+
+== EXAMPLES
+
+. Build a local package and all of its dependencies:
+
+    cargo build
+
+. Build a package with optimizations:
+
+    cargo build --release
+
+. Run tests for a cross-compiled target:
+
+    cargo test --target i686-unknown-linux-gnu
+
+. Create a new package that builds an executable:
+
+    cargo new foobar
+
+. Create a package in the current directory:
+
+    mkdir foo && cd foo
+    cargo init .
+
+. Learn about a command's options and usage:
+
+    cargo help clean
+
+== BUGS
+
+See https://github.com/rust-lang/cargo/issues for issues.
+
+== SEE ALSO
+man:rustc[1], man:rustdoc[1]
diff --git a/src/doc/man/description-install-root.adoc b/src/doc/man/description-install-root.adoc
new file mode 100644
index 00000000000..d7773d3b20a
--- /dev/null
+++ b/src/doc/man/description-install-root.adoc
@@ -0,0 +1,7 @@
+The installation root is determined, in order of precedence:
+
+- `--root` option
+- `CARGO_INSTALL_ROOT` environment variable
+- `install.root` Cargo linkcargo:reference/config.html[config value]
+- `CARGO_HOME` environment variable
+- `$HOME/.cargo`
diff --git a/src/doc/man/description-new-authors.adoc b/src/doc/man/description-new-authors.adoc
new file mode 100644
index 00000000000..0435295b726
--- /dev/null
+++ b/src/doc/man/description-new-authors.adoc
@@ -0,0 +1,24 @@
+The "authors" field in the manifest is determined from the environment or
+configuration settings.
+A name is required and is determined from (first match wins):
+
+- `cargo-new.name` Cargo config value
+- `CARGO_NAME` environment variable
+- `GIT_AUTHOR_NAME` environment variable
+- `GIT_COMMITTER_NAME` environment variable
+- `user.name` git configuration value
+- `USER` environment variable
+- `USERNAME` environment variable
+- `NAME` environment variable
+
+The email address is optional and is determined from:
+
+- `cargo-new.email` Cargo config value
+- `CARGO_EMAIL` environment variable
+- `GIT_AUTHOR_EMAIL` environment variable
+- `GIT_COMMITTER_EMAIL` environment variable
+- `user.email` git configuration value
+- `EMAIL` environment variable
+
+See linkcargo:reference/config.html[the reference] for more information about
+configuration files.
diff --git a/src/doc/man/description-one-target.adoc b/src/doc/man/description-one-target.adoc
new file mode 100644
index 00000000000..7af18131f5f
--- /dev/null
+++ b/src/doc/man/description-one-target.adoc
@@ -0,0 +1,4 @@
+This command requires that only one target is being compiled when additional
+arguments are provided. If more than one target is available for the current
+package, the filters of `--lib`, `--bin`, etc., must be used to select which
+target is compiled.
diff --git a/src/doc/man/generated/cargo-bench.html b/src/doc/man/generated/cargo-bench.html
new file mode 100644
index 00000000000..dc00b1fe104
--- /dev/null
+++ b/src/doc/man/generated/cargo-bench.html
@@ -0,0 +1,483 @@

[generated HTML rendering of the cargo-bench(1) man page, built from src/doc/man/cargo-bench.adoc; it mirrors the source's NAME, SYNOPSIS, DESCRIPTION, OPTIONS (benchmark, package, target, feature, compilation, output, display, manifest, common, and miscellaneous options), PROFILES, ENVIRONMENT, Exit Status, EXAMPLES, and SEE ALSO sections]
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-build.html b/src/doc/man/generated/cargo-build.html new file mode 100644 index 00000000000..593d454caa5 --- /dev/null +++ b/src/doc/man/generated/cargo-build.html @@ -0,0 +1,446 @@ +

[generated HTML rendering of the cargo-build(1) man page, built from src/doc/man/cargo-build.adoc; it mirrors the source's sections, including the PROFILES table mapping lib/bin/example targets to the dev and release profiles, and targets in "test" or "bench" mode to the test and bench profiles]
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-check.html b/src/doc/man/generated/cargo-check.html new file mode 100644 index 00000000000..01e43427099 --- /dev/null +++ b/src/doc/man/generated/cargo-check.html @@ -0,0 +1,437 @@ +

[generated HTML rendering of the cargo-check(1) man page, built from src/doc/man/cargo-check.adoc; it mirrors the source's sections, including the `--profile test` option for checking unit tests with the `#[cfg(test)]` attribute enabled]
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-clean.html b/src/doc/man/generated/cargo-clean.html new file mode 100644 index 00000000000..5a9208babc3 --- /dev/null +++ b/src/doc/man/generated/cargo-clean.html @@ -0,0 +1,241 @@ +

[generated HTML rendering of the cargo-clean(1) man page, built from src/doc/man/cargo-clean.adoc; it mirrors the source's NAME, SYNOPSIS, DESCRIPTION, OPTIONS (package selection, clean, display, manifest, and common options), ENVIRONMENT, Exit Status, EXAMPLES, and SEE ALSO sections]
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-doc.html b/src/doc/man/generated/cargo-doc.html new file mode 100644 index 00000000000..1b07f0f94b2 --- /dev/null +++ b/src/doc/man/generated/cargo-doc.html @@ -0,0 +1,398 @@ +

NAME

+
+

cargo-doc - Build a package's documentation

+
+
+

SYNOPSIS

+
+
+

cargo doc [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

Build the documentation for the local package and all dependencies. The output +is placed in target/doc in rustdoc’s usual format.

+
+
+
+
+

OPTIONS

+
+
+

Documentation Options

+
+
+
--open
+
+

Open the docs in a browser after building them.

+
+
--no-deps
+
+

Do not build documentation for dependencies.

+
+
--document-private-items
+
+

Include non-public items in the documentation.

+
+
+
+
+
+

Package Selection

+
+

By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (--all is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the workspace.default-members key in the root Cargo.toml +manifest.

+
+
+
+
-p SPEC…​
+
--package SPEC…​
+
+

Document only the specified packages. See cargo-pkgid(1) for the +SPEC format. This flag may be specified multiple times.

+
+
--all
+
+

Document all members in the workspace.

+
+
--exclude SPEC…​
+
+

Exclude the specified packages. Must be used in conjunction with the +--all flag. This flag may be specified multiple times.

+
+
+
+
+
+

Target Selection

+
+

When no target selection options are given, cargo doc will document all +binary and library targets of the selected package. The binary will be skipped +if its name is the same as the lib target. Binaries are skipped if they have +required-features that are missing.

+
+
+

The default behavior can be changed by setting doc = false for the target in +the manifest settings. Using target selection options will ignore the doc +flag and will always document the given target.

+
+
+
+
--lib
+
+

Document the package’s library.

+
+
--bin NAME…​
+
+

Document the specified binary. This flag may be specified multiple times.

+
+
--bins
+
+

Document all binary targets.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
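Taken together, these flags compose as in this sketch (the feature name and the dependency/feature pair are hypothetical):

    cargo doc --no-default-features --features cli,serde/derive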
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Document for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--release
+
+

Document optimized artifacts with the release profile. See the +PROFILES section for details on how this affects profile selection.

+
+
+
+
+
+

Output Options

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
--message-format FMT
+
+

The output format for diagnostic messages. Valid values:

+
+
  • human (default): Display in a human-readable text format.
  • json: Emit JSON messages to stdout.
  • short: Emit shorter, human-readable text messages.
+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+
+
+

PROFILES

+
+
+

Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +the reference +for more details.

+
+
+

Profile selection depends on the target and crate being built. By default the +dev or test profiles are used. If the --release flag is given, then the +release or bench profiles are used.

+
Target                                                  Default Profile   --release Profile
lib, bin, example                                       dev               release
test, bench, or any target in "test" or "bench" mode    test              bench

+
+

Dependencies use the dev/release profiles.

+
+
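Concretely, a sketch of how the flag switches the profile row in the table above:

    cargo doc            # documents with the dev profile
    cargo doc --release  # documents with the release profile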
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Build the local package documentation and its dependencies and output to target/doc:

     cargo doc
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-fetch.html b/src/doc/man/generated/cargo-fetch.html new file mode 100644 index 00000000000..3a1e304702d --- /dev/null +++ b/src/doc/man/generated/cargo-fetch.html @@ -0,0 +1,210 @@ +

NAME

+
+

cargo-fetch - Fetch dependencies of a package from the network

+
+
+

SYNOPSIS

+
+
+

cargo fetch [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

If a Cargo.lock file is available, this command will ensure that all of the +git dependencies and/or registry dependencies are downloaded and locally +available. Subsequent Cargo commands never touch the network after a cargo +fetch unless the lock file changes.

+
+
+

If the lock file is not available, then this command will generate the lock +file before fetching the dependencies.

+
+
+

If --target is not specified, then all target dependencies are fetched.

+
+
+

See also the cargo-prefetch plugin, which adds a command to download popular crates. This may be useful if you plan to use Cargo without a network, via the --offline flag.

+
+
+
+
+

OPTIONS

+
+
+

Fetch options

+
+
+
--target TRIPLE
+
+

Fetch for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
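For instance, a hedged sketch of prefetching for a cross-compilation target (the triple is one entry from rustc --print target-list):

    cargo fetch --target x86_64-unknown-linux-musl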
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Fetch all dependencies:

     cargo fetch
+
+
+
+ \ No newline at end of file diff --git a/src/doc/man/generated/cargo-fix.html b/src/doc/man/generated/cargo-fix.html new file mode 100644 index 00000000000..3f870dad4e0 --- /dev/null +++ b/src/doc/man/generated/cargo-fix.html @@ -0,0 +1,516 @@ +

NAME

+
+

cargo-fix - Automatically fix lint warnings reported by rustc

+
+
+

SYNOPSIS

+
+
+

cargo fix [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

This Cargo subcommand will automatically take rustc’s suggestions from diagnostics like warnings and apply them to your source code. This is intended to help automate tasks that rustc itself already knows how to tell you to fix! The cargo fix subcommand is also being developed for the Rust 2018 edition to make it easy for code to opt in to the new edition without worrying about any breakage.

+
+
+

Executing cargo fix will under the hood execute cargo-check(1). Any warnings +applicable to your crate will be automatically fixed (if possible) and all +remaining warnings will be displayed when the check process is finished. For +example if you’d like to prepare for the 2018 edition, you can do so by +executing:

+
+
+
+
cargo fix --edition
+
+
+
+

which behaves the same as cargo check --all-targets. Similarly if you’d like +to fix code for different platforms you can do:

+
+
+
+
cargo fix --edition --target x86_64-pc-windows-gnu
+
+
+
+

or if your crate has optional features:

+
+
+
+
cargo fix --edition --no-default-features --features foo
+
+
+
+

If you encounter any problems with cargo fix or otherwise have any questions or feature requests, please don’t hesitate to file an issue at https://github.com/rust-lang/cargo.

+
+
+
+
+

OPTIONS

+
+
+

Fix options

+
+
+
--broken-code
+
+

Fix code even if it already has compiler errors. This is useful if cargo +fix fails to apply the changes. It will apply the changes and leave the +broken code in the working directory for you to inspect and manually fix.

+
+
--edition
+
+

Apply changes that will update the code to the latest edition. This will +not update the edition in the Cargo.toml manifest, which must be updated +manually.

+
+
--edition-idioms
+
+

Apply suggestions that will update code to the preferred style for the +current edition.

+
+
--allow-no-vcs
+
+

Fix code even if a VCS was not detected.

+
+
--allow-dirty
+
+

Fix code even if the working directory has changes.

+
+
--allow-staged
+
+

Fix code even if the working directory has staged changes.

+
+
+
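As a sketch, the VCS-related escape hatches combine like this; by default cargo fix refuses to run when it might clobber uncommitted work:

    cargo fix --allow-dirty --allow-staged  # run despite uncommitted or staged changes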
+
+
+

Package Selection

+
+

By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (--all is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the workspace.default-members key in the root Cargo.toml +manifest.

+
+
+
+
-p SPEC…​
+
--package SPEC…​
+
+

Fix only the specified packages. See cargo-pkgid(1) for the +SPEC format. This flag may be specified multiple times.

+
+
--all
+
+

Fix all members in the workspace.

+
+
--exclude SPEC…​
+
+

Exclude the specified packages. Must be used in conjunction with the +--all flag. This flag may be specified multiple times.

+
+
+
+
+
+

Target Selection

+
+

When no target selection options are given, cargo fix will fix all targets +(--all-targets implied). Binaries are skipped if they have +required-features that are missing.

+
+
+

Passing target selection flags will fix only the +specified targets.

+
+
+
+
--lib
+
+

Fix the package’s library.

+
+
--bin NAME…​
+
+

Fix the specified binary. This flag may be specified multiple times.

+
+
--bins
+
+

Fix all binary targets.

+
+
--example NAME…​
+
+

Fix the specified example. This flag may be specified multiple times.

+
+
--examples
+
+

Fix all example targets.

+
+
--test NAME…​
+
+

Fix the specified integration test. This flag may be specified multiple +times.

+
+
--tests
+
+

Fix all targets in test mode that have the test = true manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the test flag in the +manifest settings for the target.

+
+
--bench NAME…​
+
+

Fix the specified benchmark. This flag may be specified multiple times.

+
+
--benches
+
+

Fix all targets in benchmark mode that have the bench = true +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the bench flag in the +manifest settings for the target.

+
+
--all-targets
+
+

Fix all targets. This is equivalent to specifying --lib --bins +--tests --benches --examples.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Fix for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--release
+
+

Fix optimized artifacts with the release profile. See the +PROFILES section for details on how this affects profile selection.

+
+
--profile NAME
+
+

Changes fix behavior. Currently only test is +supported, which will fix with the +#[cfg(test)] attribute enabled. This is useful to have it +fix unit tests which are usually excluded via +the cfg attribute. This does not change the actual profile used.

+
+
+
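A minimal sketch of the flag described above:

    cargo fix --profile test  # also apply suggestions inside #[cfg(test)] code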
+
+
+

Output Options

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
--message-format FMT
+
+

The output format for diagnostic messages. Valid values:

+
+
  • human (default): Display in a human-readable text format.
  • json: Emit JSON messages to stdout.
  • short: Emit shorter, human-readable text messages.
+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+
+
+

PROFILES

+
+
+

Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +the reference +for more details.

+
+
+

Profile selection depends on the target and crate being built. By default the +dev or test profiles are used. If the --release flag is given, then the +release or bench profiles are used.

+
Target                                                  Default Profile   --release Profile
lib, bin, example                                       dev               release
test, bench, or any target in "test" or "bench" mode    test              bench

+
+

Dependencies use the dev/release profiles.

+
+
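As with the other build-like commands, a sketch of the selection:

    cargo fix            # fixes with the dev/test profiles
    cargo fix --release  # fixes with the release/bench profiles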
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Apply compiler suggestions to the local package:

     cargo fix

  2. Convert a 2015 edition to 2018:

     cargo fix --edition

  3. Apply suggested idioms for the current edition:

     cargo fix --edition-idioms
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-generate-lockfile.html b/src/doc/man/generated/cargo-generate-lockfile.html new file mode 100644 index 00000000000..8cd2c8f65e5 --- /dev/null +++ b/src/doc/man/generated/cargo-generate-lockfile.html @@ -0,0 +1,183 @@ +

NAME

+
+

cargo-generate-lockfile - Generate the lockfile for a package

+
+
+

SYNOPSIS

+
+
+

cargo generate-lockfile [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will create the Cargo.lock lockfile for the current package or +workspace. If the lockfile already exists, it will be rebuilt if there are any +manifest changes or dependency updates.

+
+
+

See also cargo-update(1) which is also capable of creating a Cargo.lock +lockfile and has more options for controlling update behavior.

+
+
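A hedged sketch contrasting the two commands mentioned above (the package name passed to cargo update is hypothetical):

    cargo generate-lockfile     # create or refresh Cargo.lock wholesale
    cargo update -p some-crate  # finer-grained control over a single dependency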
+
+
+

OPTIONS

+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Create or update the lockfile for the current package or workspace:

     cargo generate-lockfile
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-help.html b/src/doc/man/generated/cargo-help.html new file mode 100644 index 00000000000..0bdceebd1c9 --- /dev/null +++ b/src/doc/man/generated/cargo-help.html @@ -0,0 +1,53 @@ +

NAME

+
+

cargo-help - Get help for a Cargo command

+
+
+

SYNOPSIS

+
+
+

cargo help [SUBCOMMAND]

+
+
+
+
+

DESCRIPTION

+
+
+

Prints a help message for the given command.

+
+
+
+
+

EXAMPLES

+
+
+
  1. Get help for a command:

     cargo help build

  2. Help is also available with the --help flag:

     cargo build --help
+
+
+
+
+

SEE ALSO

+
+ +
+
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-init.html b/src/doc/man/generated/cargo-init.html new file mode 100644 index 00000000000..3af33f63677 --- /dev/null +++ b/src/doc/man/generated/cargo-init.html @@ -0,0 +1,255 @@ +

NAME

+
+

cargo-init - Create a new Cargo package in an existing directory

+
+
+

SYNOPSIS

+
+
+

cargo init [OPTIONS] [PATH]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will create a new Cargo manifest in the current directory. Give a path as an argument to create the manifest in the given directory instead.

+
+
+

If there are typically-named Rust source files already in the directory, those +will be used. If not, then a sample src/main.rs file will be created, or +src/lib.rs if --lib is passed.

+
+
+

If the directory is not already in a VCS repository, then a new repository +is created (see --vcs below).

+
+
+

The "authors" field in the manifest is determined from the environment or +configuration settings. A name is required and is determined from (first match +wins):

+
+
+
  • cargo-new.name Cargo config value
  • CARGO_NAME environment variable
  • GIT_AUTHOR_NAME environment variable
  • GIT_COMMITTER_NAME environment variable
  • user.name git configuration value
  • USER environment variable
  • USERNAME environment variable
  • NAME environment variable
+
+
+

The email address is optional and is determined from:

+
+
+
  • cargo-new.email Cargo config value
  • CARGO_EMAIL environment variable
  • GIT_AUTHOR_EMAIL environment variable
  • GIT_COMMITTER_EMAIL environment variable
  • user.email git configuration value
  • EMAIL environment variable
+
+
+

See the reference for more information about +configuration files.

+
+
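For example, a sketch that pins the generated "authors" field via two of the environment variables listed above (the name and address are illustrative):

    CARGO_NAME="Jane Doe" CARGO_EMAIL="user@example.com" cargo init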
+

See cargo-new(1) for a similar command which will create a new package in +a new directory.

+
+
+
+
+

OPTIONS

+
+
+

Init Options

+
+
+
--bin
+
+

Create a package with a binary target (src/main.rs). +This is the default behavior.

+
+
--lib
+
+

Create a package with a library target (src/lib.rs).

+
+
--edition EDITION
+
+

Specify the Rust edition to use. Default is 2018. +Possible values: 2015, 2018

+
+
--name NAME
+
+

Set the package name. Defaults to the directory name.

+
+
--vcs VCS
+
+

Initialize a new VCS repository for the given version control system (git, +hg, pijul, or fossil) or do not initialize any version control at all +(none). If not specified, defaults to git or the configuration value +cargo-new.vcs, or none if already inside a VCS repository.

+
+
--registry REGISTRY
+
+

This sets the publish field in Cargo.toml to the given registry name +which will restrict publishing only to that registry.

+
+

Registry names are defined in Cargo config files. If not specified, the default registry defined by the registry.default config key is used. If the default registry is not set and --registry is not used, the publish field will not be set, which means that publishing will not be restricted.

+
+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Create a binary Cargo package in the current directory:

     cargo init
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-install.html b/src/doc/man/generated/cargo-install.html new file mode 100644 index 00000000000..5c567e898c6 --- /dev/null +++ b/src/doc/man/generated/cargo-install.html @@ -0,0 +1,375 @@ +

NAME

+
+

cargo-install - Build and install a Rust binary

+
+
+

SYNOPSIS

+
+
+

cargo install [OPTIONS] CRATE…​
+cargo install [OPTIONS] --path PATH
+cargo install [OPTIONS] --git URL [CRATE…​]
+cargo install [OPTIONS] --list

+
+
+
+
+

DESCRIPTION

+
+
+

This command manages Cargo’s local set of installed binary crates. Only +packages which have executable [[bin]] or [[example]] targets can be +installed, and all executables are installed into the installation root’s +bin folder.

+
+
+

The installation root is determined, in order of precedence:

+
+
+
  • --root option
  • CARGO_INSTALL_ROOT environment variable
  • install.root Cargo config value
  • CARGO_HOME environment variable
  • $HOME/.cargo
+
+
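As a sketch, overriding the installation root explicitly (the directory is illustrative):

    cargo install --root ~/tools ripgrep  # the executable ends up in ~/tools/bin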
+

There are multiple sources from which a crate can be installed. The default +location is crates.io but the --git, --path, and --registry flags can +change this source. If the source contains more than one package (such as +crates.io or a git repository with multiple crates) the CRATE argument is +required to indicate which crate should be installed.

+
+
+

Crates from crates.io can optionally specify the version they wish to install via the --version flag, and similarly packages from git repositories can optionally specify the branch, tag, or revision that should be installed. If a crate has multiple binaries, the --bin argument can selectively install only one of them, and if you’d rather install examples, the --example argument can be used as well.

+
+
+

If the source is crates.io or --git then by default the crate will be built +in a temporary target directory. To avoid this, the target directory can be +specified by setting the CARGO_TARGET_DIR environment variable to a relative +path. In particular, this can be useful for caching build artifacts on +continuous integration systems.

+
+
+

By default, the Cargo.lock file that is included with the package will be +ignored. This means that Cargo will recompute which versions of dependencies +to use, possibly using newer versions that have been released since the +package was published. The --locked flag can be used to force Cargo to use +the packaged Cargo.lock file if it is available. This may be useful for +ensuring reproducible builds, to use the exact same set of dependencies that +were available when the package was published. It may also be useful if a +newer version of a dependency is published that no longer builds on your +system, or has other problems. The downside to using --locked is that you +will not receive any fixes or updates to any dependency. Note that Cargo did +not start publishing Cargo.lock files until version 1.37, which means +packages published with prior versions will not have a Cargo.lock file +available.

+
+
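Hedged sketches of pinning an installation (the version number is illustrative):

    cargo install ripgrep --version 0.10.0  # pin the crate version
    cargo install ripgrep --locked          # honor the packaged Cargo.lock, if any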
+
+
+

OPTIONS

+
+
+

Install Options

+
+
+
--vers VERSION
+
--version VERSION
+
+

Specify a version to install.

+
+
--git URL
+
+

Git URL to install the specified crate from.

+
+
--branch BRANCH
+
+

Branch to use when installing from git.

+
+
--tag TAG
+
+

Tag to use when installing from git.

+
+
--rev SHA
+
+

Specific commit to use when installing from git.

+
+
--path PATH
+
+

Filesystem path to local crate to install.

+
+
--list
+
+

List all installed packages and their versions.

+
+
-f
+
--force
+
+

Force overwriting existing crates or binaries. This can be used to +reinstall or upgrade a crate.

+
+
--bin NAME…​
+
+

Install only the specified binary.

+
+
--bins
+
+

Install all binaries.

+
+
--example NAME…​
+
+

Install only the specified example.

+
+
--examples
+
+

Install all examples.

+
+
--root DIR
+
+

Directory to install packages into.

+
+
--registry REGISTRY
+
+

Name of the registry to use. Registry names are defined in Cargo config files. +If not specified, the default registry is used, which is defined by the +registry.default config key which defaults to crates-io.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Install for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--debug
+
+

Build with the dev profile instead of the release profile.

+
+
+
+
+
+

Manifest Options

+
+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Install a package from crates.io:

     cargo install ripgrep

  2. Reinstall or upgrade a package:

     cargo install ripgrep --force
+
+
+
+ \ No newline at end of file diff --git a/src/doc/man/generated/cargo-locate-project.html b/src/doc/man/generated/cargo-locate-project.html new file mode 100644 index 00000000000..f4d8d8b971f --- /dev/null +++ b/src/doc/man/generated/cargo-locate-project.html @@ -0,0 +1,152 @@ +

NAME

+
+

cargo-locate-project - Print a JSON representation of a Cargo.toml file's location

+
+
+

SYNOPSIS

+
+
+

cargo locate-project [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will print a JSON object to stdout with the full path to the +Cargo.toml manifest.

+
+
+

See also cargo-metadata(1) which is capable of returning the path to a +workspace root.

+
+
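A sketch of the shape of the output (the path is illustrative):

    cargo locate-project
    # prints something like: {"root":"/path/to/my-package/Cargo.toml"}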
+
+
+

OPTIONS

+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Display the path to the manifest based on the current directory:

     cargo locate-project
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-login.html b/src/doc/man/generated/cargo-login.html new file mode 100644 index 00000000000..d913311730e --- /dev/null +++ b/src/doc/man/generated/cargo-login.html @@ -0,0 +1,160 @@ +

NAME

+
+

cargo-login - Save an API token from the registry locally

+
+
+

SYNOPSIS

+
+
+

cargo login [OPTIONS] [TOKEN]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will save the API token to disk so that commands that require +authentication, such as cargo-publish(1), will be automatically +authenticated. The token is saved in $CARGO_HOME/credentials. CARGO_HOME +defaults to .cargo in your home directory.

+
+
+

If the TOKEN argument is not specified, it will be read from stdin.

+
+
+

The API token for crates.io may be retrieved from https://crates.io/me.

+
+
+

Take care to keep the token secret; it should not be shared with anyone else.

+
+
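Since the token is read from stdin when not given as an argument, it can be piped in rather than exposed in shell history (the file name is hypothetical):

    cargo login < token.txt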
+
+
+

OPTIONS

+
+
+

Login Options

+
+
+
--registry REGISTRY
+
+

Name of the registry to use. Registry names are defined in Cargo config files. +If not specified, the default registry is used, which is defined by the +registry.default config key which defaults to crates-io.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Save the API token to disk:

     cargo login
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-metadata.html b/src/doc/man/generated/cargo-metadata.html new file mode 100644 index 00000000000..9a0cfc7d314 --- /dev/null +++ b/src/doc/man/generated/cargo-metadata.html @@ -0,0 +1,459 @@ +

NAME

+
+

cargo-metadata - Machine-readable metadata about the current package

+
+
+

SYNOPSIS

+
+
+

cargo metadata [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

Output the resolved dependencies of a package in JSON to stdout, listing the concrete versions in use, including overrides.

+
+
+

It is recommended to include the --format-version flag to future-proof your code and ensure the output is in the format you are expecting.

+
+
+

See the cargo_metadata crate +for a Rust API for reading the metadata.

+
+
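Outside of Rust, the JSON can be post-processed with ordinary tools; a sketch assuming jq is installed (the .workspace_root key appears in the schema below):

    cargo metadata --format-version 1 --no-deps | jq -r .workspace_root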
+
+
+

OUTPUT FORMAT

+
+
+

The output has the following format:

+
+
+
+
{
+    /* Array of all packages in the workspace.
+       It also includes all feature-enabled dependencies unless --no-deps is used.
+    */
+    "packages": [
+        {
+            /* The name of the package. */
+            "name": "my-package",
+            /* The version of the package. */
+            "version": "0.1.0",
+            /* The Package ID, a unique identifier for referring to the package. */
+            "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+            /* The license value from the manifest, or null. */
+            "license": "MIT/Apache-2.0",
+            /* The license-file value from the manifest, or null. */
+            "license_file": "LICENSE",
+            /* The description value from the manifest, or null. */
+            "description": "Package description.",
+            /* The source ID of the package. This represents where
+               a package is retrieved from.
+               This is null for path dependencies and workspace members.
+               For other dependencies, it is a string with the format:
+               - "registry+URL" for registry-based dependencies.
+                 Example: "registry+https://github.com/rust-lang/crates.io-index"
+               - "git+URL" for git-based dependencies.
+                 Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c"
+            */
+            "source": null,
+            /* Array of dependencies declared in the package's manifest. */
+            "dependencies": [
+                {
+                    /* The name of the dependency. */
+                    "name": "bitflags",
+                    /* The source ID of the dependency. May be null, see
+                       description for the package source.
+                    */
+                    "source": "registry+https://github.com/rust-lang/crates.io-index",
+                    /* The version requirement for the dependency.
+                       Dependencies without a version requirement have a value of "*".
+                    */
+                    "req": "^1.0",
+                    /* The dependency kind.
+                       "dev", "build", or null for a normal dependency.
+                    */
+                    "kind": null,
+                    /* If the dependency is renamed, this is the new name for
+                       the dependency as a string.  null if it is not renamed.
+                    */
+                    "rename": null,
+                    /* Boolean of whether or not this is an optional dependency. */
+                    "optional": false,
+                    /* Boolean of whether or not default features are enabled. */
+                    "uses_default_features": true,
+                    /* Array of features enabled. */
+                    "features": [],
+                    /* The target platform for the dependency.
+                       null if not a target dependency.
+                    */
+                    "target": "cfg(windows)",
+                    /* A string of the URL of the registry this dependency is from.
+                       If not specified or null, the dependency is from the default
+                       registry (crates.io).
+                    */
+                    "registry": null
+                }
+            ],
+            /* Array of Cargo targets. */
+            "targets": [
+                {
+                    /* Array of target kinds.
+                       - lib targets list the `crate-type` values from the
+                         manifest such as "lib", "rlib", "dylib",
+                         "proc-macro", etc. (default ["lib"])
+                       - binary is ["bin"]
+                       - example is ["example"]
+                       - integration test is ["test"]
+                       - benchmark is ["bench"]
+                       - build script is ["custom-build"]
+                    */
+                    "kind": [
+                        "bin"
+                    ],
+                    /* Array of crate types.
+                       - lib and example libraries list the `crate-type` values
+                         from the manifest such as "lib", "rlib", "dylib",
+                         "proc-macro", etc. (default ["lib"])
+                       - all other target kinds are ["bin"]
+                    */
+                    "crate_types": [
+                        "bin"
+                    ],
+                    /* The name of the target. */
+                    "name": "my-package",
+                    /* Absolute path to the root source file of the target. */
+                    "src_path": "/path/to/my-package/src/main.rs",
+                    /* The Rust edition of the target.
+                       Defaults to the package edition.
+                    */
+                    "edition": "2018",
+                    /* Array of required features.
+                       This property is not included if no required features are set.
+                    */
+                    "required-features": ["feat1"],
+                    /* Whether or not this target has doc tests enabled, and
+                       the target is compatible with doc testing.
+                    */
+                    "doctest": false
+                }
+            ],
+            /* Set of features defined for the package.
+               Each feature maps to an array of features or dependencies it
+               enables.
+            */
+            "features": {
+                "default": [
+                    "feat1"
+                ],
+                "feat1": [],
+                "feat2": []
+            },
+            /* Absolute path to this package's manifest. */
+            "manifest_path": "/path/to/my-package/Cargo.toml",
+            /* Package metadata.
+               This is null if no metadata is specified.
+            */
+            "metadata": {
+                "docs": {
+                    "rs": {
+                        "all-features": true
+                    }
+                }
+            },
+            /* Array of authors from the manifest.
+               Empty array if no authors specified.
+            */
+            "authors": [
+                "Jane Doe <user@example.com>"
+            ],
+            /* Array of categories from the manifest. */
+            "categories": [
+                "command-line-utilities"
+            ],
+            /* Array of keywords from the manifest. */
+            "keywords": [
+                "cli"
+            ],
+            /* The readme value from the manifest or null if not specified. */
+            "readme": "README.md",
+            /* The repository value from the manifest or null if not specified. */
+            "repository": "https://github.com/rust-lang/cargo",
+            /* The default edition of the package.
+               Note that individual targets may have different editions.
+            */
+            "edition": "2018",
+            /* Optional string that is the name of a native library the package
+               is linking to.
+            */
+            "links": null
+        }
+    ],
+    /* Array of members of the workspace.
+       Each entry is the Package ID for the package.
+    */
+    "workspace_members": [
+        "my-package 0.1.0 (path+file:///path/to/my-package)"
+    ],
+    /* The resolved dependency graph, with the concrete versions and features
+       selected. The set depends on the enabled features.
+       This is null if --no-deps is specified.
+    */
+    "resolve": {
+        /* Array of nodes within the dependency graph.
+           Each node is a package.
+        */
+        "nodes": [
+            {
+                /* The Package ID of this node. */
+                "id": "my-package 0.1.0 (path+file:///path/to/my-package)",
+                /* The dependencies of this package, an array of Package IDs. */
+                "dependencies": [
+                    "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)"
+                ],
+                /* The dependencies of this package. This is an alternative to
+                   "dependencies" which contains additional information. In
+                   particular, this handles renamed dependencies.
+                */
+                "deps": [
+                    {
+                        /* The name of the dependency's library target.
+                           If this is a renamed dependency, this is the new
+                           name.
+                        */
+                        "name": "bitflags",
+                        /* The Package ID of the dependency. */
+                        "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)"
+                    }
+                ],
+                /* Array of features enabled on this package. */
+                "features": [
+                    "default"
+                ]
+            }
+        ],
+        /* The root package of the workspace.
+           This is null if this is a virtual workspace. Otherwise it is
+           the Package ID of the root package.
+        */
+        "root": "my-package 0.1.0 (path+file:///path/to/my-package)"
+    },
+    /* The absolute path to the build directory where Cargo places its output. */
+    "target_directory": "/path/to/my-package/target",
+    /* The version of the schema for this metadata structure.
+       This will be changed if incompatible changes are ever made.
+    */
+    "version": 1,
+    /* The absolute path to the root of the workspace. */
+    "workspace_root": "/path/to/my-package"
+}
+
+
+
+
+
+

OPTIONS

+
+
+

Output Options

+
+
+
--no-deps
+
+

Output information only about the workspace members and don’t fetch +dependencies.

+
+
--format-version VERSION
+
+

Specify the version of the output format to use. Currently 1 is the only +possible value.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Output JSON about the current package:

     cargo metadata --format-version=1
+
+
+
+
+

SEE ALSO

+
+ +
+
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-new.html b/src/doc/man/generated/cargo-new.html new file mode 100644 index 00000000000..87e7708479c --- /dev/null +++ b/src/doc/man/generated/cargo-new.html @@ -0,0 +1,248 @@ +

NAME

+
+

cargo-new - Create a new Cargo package

+
+
+

SYNOPSIS

+
+
+

cargo new [OPTIONS] PATH

+
+
+
+
+

DESCRIPTION

+
+
+

This command will create a new Cargo package in the given directory. This +includes a simple template with a Cargo.toml manifest, sample source file, +and a VCS ignore file. If the directory is not already in a VCS repository, +then a new repository is created (see --vcs below).

+
+
+

The "authors" field in the manifest is determined from the environment or +configuration settings. A name is required and is determined from (first match +wins):

+
+
+
  • cargo-new.name Cargo config value
  • CARGO_NAME environment variable
  • GIT_AUTHOR_NAME environment variable
  • GIT_COMMITTER_NAME environment variable
  • user.name git configuration value
  • USER environment variable
  • USERNAME environment variable
  • NAME environment variable
+
+
+

The email address is optional and is determined from:

+
+
+
  • cargo-new.email Cargo config value
  • CARGO_EMAIL environment variable
  • GIT_AUTHOR_EMAIL environment variable
  • GIT_COMMITTER_EMAIL environment variable
  • user.email git configuration value
  • EMAIL environment variable
+
+
+

See the reference for more information about +configuration files.

+
+
+

See cargo-init(1) for a similar command which will create a new manifest +in an existing directory.

+
+
+
+
+

OPTIONS

+
+
+

New Options

+
+
+
--bin
+
+

Create a package with a binary target (src/main.rs). +This is the default behavior.

+
+
--lib
+
+

Create a package with a library target (src/lib.rs).

+
+
--edition EDITION
+
+

Specify the Rust edition to use. Default is 2018. +Possible values: 2015, 2018

+
+
--name NAME
+
+

Set the package name. Defaults to the directory name.

+
+
--vcs VCS
+
+

Initialize a new VCS repository for the given version control system (git, +hg, pijul, or fossil) or do not initialize any version control at all +(none). If not specified, defaults to git or the configuration value +cargo-new.vcs, or none if already inside a VCS repository.

+
+
--registry REGISTRY
+
+

This sets the publish field in Cargo.toml to the given registry name +which will restrict publishing only to that registry.

+
+

Registry names are defined in Cargo config files. +If not specified, the default registry defined by the registry.default +config key is used. If the default registry is not set and --registry is not +used, the publish field will not be set which means that publishing will not +be restricted.
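As a sketch of how these options combine (the package name is hypothetical), the following creates a 2015-edition library package without initializing a repository:

    cargo new --lib --edition 2015 --vcs none my-lib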

+
+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Create a binary Cargo package in the given directory:

    +
    +
    +
    cargo new foo
    +
    +
    +
  2. +
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-owner.html b/src/doc/man/generated/cargo-owner.html new file mode 100644 index 00000000000..80647ad62db --- /dev/null +++ b/src/doc/man/generated/cargo-owner.html @@ -0,0 +1,212 @@ +

NAME

+
+

cargo-owner - Manage the owners of a crate on the registry

+
+
+

SYNOPSIS

+
+
+

cargo owner [OPTIONS] --add LOGIN [CRATE]
+cargo owner [OPTIONS] --remove LOGIN [CRATE]
+cargo owner [OPTIONS] --list [CRATE]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will modify the owners for a crate on the registry. Owners of a +crate can upload new versions and yank old versions. Non-team owners can also +modify the set of owners, so take care!

+
+
+

This command requires you to be authenticated with either the --token option +or using cargo-login(1).

+
+
+

If the crate name is not specified, it will use the package name from the +current directory.

+
+
+

See the reference for more +information about owners and publishing.

+
+
+
+
+

OPTIONS

+
+
+

Owner Options

+
+
+
-a
+
--add LOGIN…​
+
+

Invite the given user or team as an owner.

+
+
-r
+
--remove LOGIN…​
+
+

Remove the given user or team as an owner.

+
+
-l
+
--list
+
+

List owners of a crate.

+
+
--token TOKEN
+
+

API token to use when authenticating. This overrides the token stored in +the credentials file (which is created by cargo-login(1)).

+
+

Cargo config environment variables can be +used to override the tokens stored in the credentials file. The token for +crates.io may be specified with the CARGO_REGISTRY_TOKEN environment +variable. Tokens for other registries may be specified with environment +variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name +of the registry in all capital letters.
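For example, the crates.io token can be supplied for a single invocation (the token value is hypothetical; username and foo follow the EXAMPLES below):

    CARGO_REGISTRY_TOKEN=abc123 cargo owner --add username foo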

+
+
+
--index INDEX
+
+

The URL of the registry index to use.

+
+
--registry REGISTRY
+
+

Name of the registry to use. Registry names are defined in Cargo config files. +If not specified, the default registry is used, which is defined by the +registry.default config key which defaults to crates-io.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    List owners of a package:

    +
    +
    +
    cargo owner --list foo
    +
    +
    +
  2. +
  3. +

    Invite an owner to a package:

    +
    +
    +
    cargo owner --add username foo
    +
    +
    +
  4. +
  5. +

    Remove an owner from a package:

    +
    +
    +
    cargo owner --remove username foo
    +
    +
    +
  6. +
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-package.html b/src/doc/man/generated/cargo-package.html new file mode 100644 index 00000000000..2061369de6f --- /dev/null +++ b/src/doc/man/generated/cargo-package.html @@ -0,0 +1,323 @@ +

NAME

+
+

cargo-package - Assemble the local package into a distributable tarball

+
+
+

SYNOPSIS

+
+
+

cargo package [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will create a distributable, compressed .crate file with the +source code of the package in the current directory. The resulting file will +be stored in the target/package directory. This performs the following +steps:

+
+
+
    +
  1. +

    Load and check the current workspace, performing some basic checks.

    +
    +
      +
    • +

      Path dependencies are not allowed unless they have a version key. Cargo +will ignore the path key for dependencies in published packages.

      +
    • +
    +
    +
  2. +
  3. +

    Create the compressed .crate file.

    +
    +
      +
    • +

      The original Cargo.toml file is rewritten and normalized.

      +
    • +
    • +

      [patch], [replace], and [workspace] sections are removed from the +manifest.

      +
    • +
    • +

      Cargo.lock is automatically included if the package contains an +executable binary or example target. cargo-install(1) will use the +packaged lock file if the --locked flag is used.

      +
    • +
    • +

      A .cargo_vcs_info.json file is included that contains information +about the current VCS checkout hash if available (not included with +--allow-dirty).

      +
    • +
    +
    +
  4. +
  5. +

    Extract the .crate file and build it to verify it can build.

    +
  6. +
  7. +

    Check that build scripts did not modify any source files.

    +
  8. +
+
+
+

The list of files included can be controlled with the include and exclude +fields in the manifest.
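To preview which files these rules select without creating the tarball, use the --list flag described below:

    cargo package --list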

+
+
+

See the reference for more details about +packaging and publishing.

+
+
+
+
+

OPTIONS

+
+
+

Package Options

+
+
+
-l
+
--list
+
+

Print files included in a package without making one.

+
+
--no-verify
+
+

Don’t verify the contents by building them.

+
+
--no-metadata
+
+

Ignore warnings about a lack of human-usable metadata (such as the +description or the license).

+
+
--allow-dirty
+
+

Allow working directories with uncommitted VCS changes to be packaged.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Package for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Create a compressed .crate file of the current package:

    +
    +
    +
    cargo package
    +
    +
    +
  2. +
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-pkgid.html b/src/doc/man/generated/cargo-pkgid.html new file mode 100644 index 00000000000..c2fcf99c65a --- /dev/null +++ b/src/doc/man/generated/cargo-pkgid.html @@ -0,0 +1,258 @@ +

NAME

+
+

cargo-pkgid - Print a fully qualified package specification

+
+
+

SYNOPSIS

+
+
+

cargo pkgid [OPTIONS] [SPEC]

+
+
+
+
+

DESCRIPTION

+
+
+

Given a SPEC argument, print out the fully qualified package ID specifier +for a package or dependency in the current workspace. This command will +generate an error if SPEC is ambiguous as to which package it refers to in +the dependency graph. If no SPEC is given, then the specifier for the local +package is printed.

+
+
+

This command requires that a lockfile is available and dependencies have been +fetched.
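If the lockfile does not exist yet, one way to create it and fetch dependencies without building (a sketch) is:

    cargo generate-lockfile
    cargo fetch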

+
+
+

A package specifier consists of a name, version, and source URL. You are +allowed to use partial specifiers to succinctly match a specific package as +long as it matches only one package. The format of a SPEC can be one of the +following:

+
Table 1. SPEC Query Format

SPEC Structure        Example SPEC
NAME                  bitflags
NAME:VERSION          bitflags:1.0.4
URL                   https://github.com/rust-lang/cargo
URL#VERSION           https://github.com/rust-lang/cargo#0.33.0
URL#NAME              https://github.com/rust-lang/crates.io-index#bitflags
URL#NAME:VERSION      https://github.com/rust-lang/cargo#crates-io:0.21.0
+
+
+
+

OPTIONS

+
+
+

Package Selection

+
+
+
-p SPEC
+
--package SPEC
+
+

Get the package ID for the given package instead of the current package.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Retrieve package specification for foo package:

    +
    +
    +
    cargo pkgid foo
    +
    +
    +
  2. +
  3. +

    Retrieve package specification for version 1.0.0 of foo:

    +
    +
    +
    cargo pkgid foo:1.0.0
    +
    +
    +
  4. +
  5. +

    Retrieve package specification for foo from crates.io:

    +
    +
    +
    cargo pkgid https://github.com/rust-lang/crates.io-index#foo
    +
    +
    +
  6. +
+
+
+
+ \ No newline at end of file diff --git a/src/doc/man/generated/cargo-publish.html b/src/doc/man/generated/cargo-publish.html new file mode 100644 index 00000000000..1aeadcc5014 --- /dev/null +++ b/src/doc/man/generated/cargo-publish.html @@ -0,0 +1,317 @@ +

NAME

+
+

cargo-publish - Upload a package to the registry

+
+
+

SYNOPSIS

+
+
+

cargo publish [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will create a distributable, compressed .crate file with the +source code of the package in the current directory and upload it to a +registry. The default registry is https://crates.io. This performs the +following steps:

+
+
+
    +
  1. +

    Performs a few checks, including:

    +
    +
      +
    • +

      Checks the package.publish key in the manifest for restrictions on which +registries you are allowed to publish to.

      +
    • +
    +
    +
  2. +
  3. +

    Create a .crate file by following the steps in cargo-package(1).

    +
  4. +
  5. +

    Upload the crate to the registry. Note that the server will perform +additional checks on the crate.

    +
  6. +
+
+
+

This command requires you to be authenticated with either the --token option +or using cargo-login(1).

+
+
+

See the reference for more details about +packaging and publishing.
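To exercise these steps without actually uploading, the --dry-run flag described below can be used:

    cargo publish --dry-run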

+
+
+
+
+

OPTIONS

+
+
+

Publish Options

+
+
+
--dry-run
+
+

Perform all checks without uploading.

+
+
--token TOKEN
+
+

API token to use when authenticating. This overrides the token stored in +the credentials file (which is created by cargo-login(1)).

+
+

Cargo config environment variables can be +used to override the tokens stored in the credentials file. The token for +crates.io may be specified with the CARGO_REGISTRY_TOKEN environment +variable. Tokens for other registries may be specified with environment +variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name +of the registry in all capital letters.

+
+
+
--no-verify
+
+

Don’t verify the contents by building them.

+
+
--allow-dirty
+
+

Allow working directories with uncommitted VCS changes to be packaged.

+
+
--index INDEX
+
+

The URL of the registry index to use.

+
+
--registry REGISTRY
+
+

Name of the registry to use. Registry names are defined in Cargo config files. +If not specified, the default registry is used, which is defined by the +registry.default config key which defaults to crates-io.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Publish for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Publish the current package:

    +
    +
    +
    cargo publish
    +
    +
    +
  2. +
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-run.html b/src/doc/man/generated/cargo-run.html new file mode 100644 index 00000000000..abadc739f9b --- /dev/null +++ b/src/doc/man/generated/cargo-run.html @@ -0,0 +1,368 @@ +

NAME

+
+

cargo-run - Run the current package

+
+
+

SYNOPSIS

+
+
+

cargo run [OPTIONS] [-- ARGS]

+
+
+
+
+

DESCRIPTION

+
+
+

Run a binary or example of the local package.

+
+
+

All the arguments following the two dashes (--) are passed to the binary to +run. If you’re passing arguments to both Cargo and the binary, the ones after +-- go to the binary, the ones before go to Cargo.

+
+
+
+
+

OPTIONS

+
+
+

Package Selection

+
+

By default, the package in the current working directory is selected. The -p +flag can be used to choose a different package in a workspace.

+
+
+
+
-p SPEC
+
--package SPEC
+
+

The package to run. See cargo-pkgid(1) for +the SPEC format.

+
+
+
+
+
+

Target Selection

+
+

When no target selection options are given, cargo run will run the binary +target. If there are multiple binary targets, you must pass a target flag to +choose one. Or, the default-run field may be specified in the [package] +section of Cargo.toml to choose the name of the binary to run by default.
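For example, assuming a package with two hypothetical binaries named cli and server, a specific one is chosen with:

    cargo run --bin cli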

+
+
+
+
--bin NAME
+
+

Run the specified binary.

+
+
--example NAME
+
+

Run the specified example.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Run for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--release
+
+

Run optimized artifacts with the release profile. See the +PROFILES section for details on how this affects profile selection.

+
+
+
+
+
+

Output Options

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
--message-format FMT
+
+

The output format for diagnostic messages. Valid values:

+
+
    +
  • +

    human (default): Display in a human-readable text format.

    +
  • +
  • +

    json: Emit JSON messages to stdout.

    +
  • +
  • +

    short: Emit shorter, human-readable text messages.

    +
  • +
+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+
+
+

PROFILES

+
+
+

Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +the reference +for more details.

+
+
+

Profile selection depends on the target and crate being built. By default the +dev or test profiles are used. If the --release flag is given, then the +release or bench profiles are used.

+
Target                                    Default Profile    --release Profile
lib, bin, example                         dev                release
test, bench, or any target
  in "test" or "bench" mode               test               bench
+
+

Dependencies use the dev/release profiles.

+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Build the local package and run its main target (assuming only one binary):

    +
    +
    +
    cargo run
    +
    +
    +
  2. +
  3. +

    Run an example with extra arguments:

    +
    +
    +
    cargo run --example exname -- --exoption exarg1 exarg2
    +
    +
    +
  4. +
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-rustc.html b/src/doc/man/generated/cargo-rustc.html new file mode 100644 index 00000000000..37ff9606ca0 --- /dev/null +++ b/src/doc/man/generated/cargo-rustc.html @@ -0,0 +1,430 @@ +

NAME

+
+

cargo-rustc - Compile the current package, and pass extra options to the compiler

+
+
+

SYNOPSIS

+
+
+

cargo rustc [OPTIONS] [-- ARGS]

+
+
+
+
+

DESCRIPTION

+
+
+

The specified target for the current package (or package specified by -p if +provided) will be compiled along with all of its dependencies. The specified +ARGS will all be passed to the final compiler invocation, not any of the +dependencies. Note that the compiler will still unconditionally receive +arguments such as -L, --extern, and --crate-type, and the specified +ARGS will simply be added to the compiler invocation.

+
+
+

See https://doc.rust-lang.org/rustc/index.html for documentation on rustc +flags.

+
+
+

This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package, the filters of --lib, --bin, etc., must be used to select which target is compiled. To pass flags to all compiler processes spawned by Cargo, use the RUSTFLAGS environment variable or the build.rustflags config value.
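As a sketch of the difference, the first command below denies unsafe code only in the final crate, while the second applies the lint to every compiler invocation, dependencies included (which will often fail the build; the lint flag is taken from the EXAMPLES section):

    cargo rustc --lib -- -D unsafe-code
    RUSTFLAGS="-D unsafe-code" cargo rustc --lib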

+
+
+
+
+

OPTIONS

+
+
+

Package Selection

+
+

By default, the package in the current working directory is selected. The -p +flag can be used to choose a different package in a workspace.

+
+
+
+
-p SPEC
+
--package SPEC
+
+

The package to build. See cargo-pkgid(1) for +the SPEC format.

+
+
+
+
+
+

Target Selection

+
+

When no target selection options are given, cargo rustc will build all +binary and library targets of the selected package.

+
+
+

Passing target selection flags will build only the +specified targets.

+
+
+
+
--lib
+
+

Build the package’s library.

+
+
--bin NAME…​
+
+

Build the specified binary. This flag may be specified multiple times.

+
+
--bins
+
+

Build all binary targets.

+
+
--example NAME…​
+
+

Build the specified example. This flag may be specified multiple times.

+
+
--examples
+
+

Build all example targets.

+
+
--test NAME…​
+
+

Build the specified integration test. This flag may be specified multiple +times.

+
+
--tests
+
+

Build all targets in test mode that have the test = true manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the test flag in the +manifest settings for the target.

+
+
--bench NAME…​
+
+

Build the specified benchmark. This flag may be specified multiple times.

+
+
--benches
+
+

Build all targets in benchmark mode that have the bench = true +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the bench flag in the +manifest settings for the target.

+
+
--all-targets
+
+

Build all targets. This is equivalent to specifying --lib --bins +--tests --benches --examples.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Build for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--release
+
+

Build optimized artifacts with the release profile. See the +PROFILES section for details on how this affects profile selection.

+
+
+
+
+
+

Output Options

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
--message-format FMT
+
+

The output format for diagnostic messages. Valid values:

+
+
    +
  • +

    human (default): Display in a human-readable text format.

    +
  • +
  • +

    json: Emit JSON messages to stdout.

    +
  • +
  • +

    short: Emit shorter, human-readable text messages.

    +
  • +
+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+
+
+

PROFILES

+
+
+

Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +the reference +for more details.

+
+
+

Profile selection depends on the target and crate being built. By default the +dev or test profiles are used. If the --release flag is given, then the +release or bench profiles are used.

+
Target                                    Default Profile    --release Profile
lib, bin, example                         dev                release
test, bench, or any target
  in "test" or "bench" mode               test               bench
+
+

Dependencies use the dev/release profiles.

+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Check if your package (not including dependencies) uses unsafe code:

    +
    +
    +
    cargo rustc --lib -- -D unsafe-code
    +
    +
    +
  2. +
  3. +

    Try an experimental flag on the nightly compiler, such as this which prints +the size of every type:

    +
    +
    +
    cargo rustc --lib -- -Z print-type-sizes
    +
    +
    +
  4. +
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-rustdoc.html b/src/doc/man/generated/cargo-rustdoc.html new file mode 100644 index 00000000000..c0bc147e57a --- /dev/null +++ b/src/doc/man/generated/cargo-rustdoc.html @@ -0,0 +1,434 @@ +

NAME

+
+

cargo-rustdoc - Build a package's documentation, using specified custom flags

+
+
+

SYNOPSIS

+
+
+

cargo rustdoc [OPTIONS] [-- ARGS]

+
+
+
+
+

DESCRIPTION

+
+
+

The specified target for the current package (or package specified by -p if +provided) will be documented with the specified ARGS being passed to the +final rustdoc invocation. Dependencies will not be documented as part of this +command. Note that rustdoc will still unconditionally receive arguments such +as -L, --extern, and --crate-type, and the specified ARGS will simply +be added to the rustdoc invocation.

+
+
+

See https://doc.rust-lang.org/rustdoc/index.html for documentation on rustdoc +flags.

+
+
+

This command requires that only one target is being compiled when additional arguments are provided. If more than one target is available for the current package, the filters of --lib, --bin, etc., must be used to select which target is compiled. To pass flags to all rustdoc processes spawned by Cargo, use the RUSTDOCFLAGS environment variable or the build.rustdocflags configuration option.
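As a sketch, the --extend-css flag from the EXAMPLES section could instead be applied to every rustdoc invocation, such as those spawned for dependencies by cargo doc, through the environment variable:

    RUSTDOCFLAGS="--extend-css extra.css" cargo doc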

+
+
+
+
+

OPTIONS

+
+
+

Documentation Options

+
+
+
--open
+
+

Open the docs in a browser after building them.

+
+
+
+
+
+

Package Selection

+
+

By default, the package in the current working directory is selected. The -p +flag can be used to choose a different package in a workspace.

+
+
+
+
-p SPEC
+
--package SPEC
+
+

The package to document. See cargo-pkgid(1) for +the SPEC format.

+
+
+
+
+
+

Target Selection

+
+

When no target selection options are given, cargo rustdoc will document all +binary and library targets of the selected package. The binary will be skipped +if its name is the same as the lib target. Binaries are skipped if they have +required-features that are missing.

+
+
+

Passing target selection flags will document only the +specified targets.

+
+
+
+
--lib
+
+

Document the package’s library.

+
+
--bin NAME…​
+
+

Document the specified binary. This flag may be specified multiple times.

+
+
--bins
+
+

Document all binary targets.

+
+
--example NAME…​
+
+

Document the specified example. This flag may be specified multiple times.

+
+
--examples
+
+

Document all example targets.

+
+
--test NAME…​
+
+

Document the specified integration test. This flag may be specified multiple +times.

+
+
--tests
+
+

Document all targets in test mode that have the test = true manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the test flag in the +manifest settings for the target.

+
+
--bench NAME…​
+
+

Document the specified benchmark. This flag may be specified multiple times.

+
+
--benches
+
+

Document all targets in benchmark mode that have the bench = true +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the bench flag in the +manifest settings for the target.

+
+
--all-targets
+
+

Document all targets. This is equivalent to specifying --lib --bins +--tests --benches --examples.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Document for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--release
+
+

Document optimized artifacts with the release profile. See the +PROFILES section for details on how this affects profile selection.

+
+
+
+
+
+

Output Options

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
--message-format FMT
+
+

The output format for diagnostic messages. Valid values:

+
+
    +
  • +

    human (default): Display in a human-readable text format.

    +
  • +
  • +

    json: Emit JSON messages to stdout.

    +
  • +
  • +

    short: Emit shorter, human-readable text messages.

    +
  • +
+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+

Miscellaneous Options

+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+
+
+

PROFILES

+
+
+

Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +the reference +for more details.

+
+
+

Profile selection depends on the target and crate being built. By default the +dev or test profiles are used. If the --release flag is given, then the +release or bench profiles are used.

+
Target                                    Default Profile    --release Profile
lib, bin, example                         dev                release
test, bench, or any target
  in "test" or "bench" mode               test               bench
+
+

Dependencies use the dev/release profiles.

+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Build documentation with custom CSS included from a given file:

    +
    +
    +
    cargo rustdoc --lib -- --extend-css extra.css
    +
    +
    +
  2. +
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-search.html b/src/doc/man/generated/cargo-search.html new file mode 100644 index 00000000000..b2ec506a9fb --- /dev/null +++ b/src/doc/man/generated/cargo-search.html @@ -0,0 +1,158 @@ +

NAME

+
+

cargo-search - Search packages in crates.io

+
+
+

SYNOPSIS

+
+
+

cargo search [OPTIONS] [QUERY…​]

+
+
+
+
+

DESCRIPTION

+
+
+

This performs a textual search for crates on https://crates.io. The matching +crates will be displayed along with their description in TOML format suitable +for copying into a Cargo.toml manifest.
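For example, to show more than the default ten matches (the query follows the EXAMPLES below):

    cargo search --limit 30 serde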

+
+
+
+
+

OPTIONS

+
+
+

Search Options

+
+
+
--limit LIMIT
+
+

Limit the number of results (default: 10, max: 100).

+
+
--index INDEX
+
+

The URL of the registry index to use.

+
+
--registry REGISTRY
+
+

Name of the registry to use. Registry names are defined in Cargo config files. +If not specified, the default registry is used, which is defined by the +registry.default config key which defaults to crates-io.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
    +
  • +

    auto (default): Automatically detect if color support is available on the +terminal.

    +
  • +
  • +

    always: Always display colors.

    +
  • +
  • +

    never: Never display colors.

    +
  • +
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
    +
  1. +

    Search for a package from crates.io:

    +
    +
    +
    cargo search serde
    +
    +
    +
  2. +
+
+
+
+ \ No newline at end of file diff --git a/src/doc/man/generated/cargo-test.html b/src/doc/man/generated/cargo-test.html new file mode 100644 index 00000000000..0419c076a02 --- /dev/null +++ b/src/doc/man/generated/cargo-test.html @@ -0,0 +1,545 @@ +

NAME

+
+

cargo-test - Execute unit and integration tests of a package

+
+
+

SYNOPSIS

+
+
+

cargo test [OPTIONS] [TESTNAME] [-- TEST-OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

Compile and execute unit and integration tests.

+
+
+

The test filtering argument TESTNAME and all the arguments following the two dashes (--) are passed to the test binaries and thus to libtest (rustc’s built-in unit-test and micro-benchmarking framework). If you’re passing arguments to both Cargo and the binary, the ones after -- go to the binary, the ones before go to Cargo. For details about libtest’s arguments see the output of cargo test -- --help. As an example, this will run all tests with foo in their name on 3 threads in parallel:

+
+
+
+
cargo test foo -- --test-threads 3
+
+
+
+

Tests are built with the --test option to rustc which creates an +executable with a main function that automatically runs all functions +annotated with the #[test] attribute in multiple threads. #[bench] +annotated functions will also be run with one iteration to verify that they +are functional.

+
+
+

The libtest harness may be disabled by setting harness = false in the target +manifest settings, in which case your code will need to provide its own main +function to handle running tests.

+
+
+

Documentation tests are also run by default, which is handled by rustdoc. It +extracts code samples from documentation comments and executes them. See the +rustdoc book for more information on +writing doc tests.
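To run only the documentation tests, use the --doc flag described below:

    cargo test --doc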

+
+
+
+
+

OPTIONS

+
+
+

Test Options

+
+
+
--no-run
+
+

Compile, but don’t run tests.

+
+
--no-fail-fast
+
+

Run all tests regardless of failure. Without this flag, Cargo will exit after the first executable fails. The Rust test harness will run all tests within the executable to completion; this flag only applies to the executable as a whole.

+
+
+
+
+
+

Package Selection

+
+

By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (--all is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the workspace.default-members key in the root Cargo.toml +manifest.

+
+
+
+
-p SPEC…​
+
--package SPEC…​
+
+

Test only the specified packages. See cargo-pkgid(1) for the +SPEC format. This flag may be specified multiple times.

+
+
--all
+
+

Test all members in the workspace.

+
+
--exclude SPEC…​
+
+

Exclude the specified packages. Must be used in conjunction with the +--all flag. This flag may be specified multiple times.

+
+
+
+
+
+

Target Selection

+
+

When no target selection options are given, cargo test will build the +following targets of the selected packages:

+
+
+
    +
  • +

    lib — used to link with binaries, examples, integration tests, and doc tests

    +
  • +
  • +

    bins (only if integration tests are built and required features are +available)

    +
  • +
  • +

    examples — to ensure they compile

    +
  • +
  • +

    lib as a unit test

    +
  • +
  • +

    bins as unit tests

    +
  • +
  • +

    integration tests

    +
  • +
  • +

    doc tests for the lib target

    +
  • +
+
+
+

The default behavior can be changed by setting the test flag for the target +in the manifest settings. Setting examples to test = true will build and run +the example as a test. Setting targets to test = false will stop them from +being tested by default. Target selection options that take a target by name +ignore the test flag and will always test the given target.

+
+
+

Doc tests for libraries may be disabled by setting doctest = false for the +library in the manifest.

+
+
+

Passing target selection flags will test only the +specified targets.

+
+
+
+
--lib
+
+

Test the package’s library.

+
+
--bin NAME…​
+
+

Test the specified binary. This flag may be specified multiple times.

+
+
--bins
+
+

Test all binary targets.

+
+
--example NAME…​
+
+

Test the specified example. This flag may be specified multiple times.

+
+
--examples
+
+

Test all example targets.

+
+
--test NAME…​
+
+

Test the specified integration test. This flag may be specified multiple +times.

+
+
--tests
+
+

Test all targets in test mode that have the test = true manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the test flag in the +manifest settings for the target.

+
+
--bench NAME…​
+
+

Test the specified benchmark. This flag may be specified multiple times.

+
+
--benches
+
+

Test all targets in benchmark mode that have the bench = true +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the bench flag in the +manifest settings for the target.

+
+
--all-targets
+
+

Test all targets. This is equivalent to specifying --lib --bins +--tests --benches --examples.

+
+
--doc
+
+

Test only the library’s documentation. This cannot be mixed with other +target options.

+
+
+
+
+
+

Feature Selection

+
+

When no feature options are given, the default feature is activated for +every selected package.

+
+
+
+
--features FEATURES
+
+

Space or comma separated list of features to activate. These features only +apply to the current directory’s package. Features of direct dependencies +may be enabled with <dep-name>/<feature-name> syntax.

+
+
--all-features
+
+

Activate all available features of all selected packages.

+
+
--no-default-features
+
+

Do not activate the default feature of the current directory’s +package.

+
+
+
+
+
+

Compilation Options

+
+
+
--target TRIPLE
+
+

Test for the given architecture. The default is the host +architecture. The general format of the triple is +<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a +list of supported targets.

+
+

This may also be specified with the build.target +config value.

+
+
+
--release
+
+

Test optimized artifacts with the release profile. See the +PROFILES section for details on how this affects profile selection.

+
+
+
+
+
+

Output Options

+
+
+
--target-dir DIRECTORY
+
+

Directory for all generated artifacts and intermediate files. May also be +specified with the CARGO_TARGET_DIR environment variable, or the +build.target-dir config value. Defaults +to target in the root of the workspace.

+
+
+
+
+
+

Display Options

+
+

By default the Rust test harness hides output from test execution to keep +results readable. Test output can be recovered (e.g., for debugging) by passing +--nocapture to the test binaries:

+
+
+
+
cargo test -- --nocapture
+
+
+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the +terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
--message-format FMT
+
+

The output format for diagnostic messages. Valid values:

+
+
  • human (default): Display in a human-readable text format.
  • json: Emit JSON messages to stdout.
  • short: Emit shorter, human-readable text messages.
+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.
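
For example, a CI job might run:

cargo test --locked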

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.
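
A typical sequence is to download dependencies while online and then test
without the network:

cargo fetch
cargo test --offline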

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+

Miscellaneous Options

+
+

The --jobs argument affects the building of the test executable but does not +affect how many threads are used when running the tests. The Rust test harness +includes an option to control the number of threads used:

+
+
+
+
cargo test -j 2 -- --test-threads=2
+
+
+
+
+
-j N
+
--jobs N
+
+

Number of parallel jobs to run. May also be specified with the +build.jobs config value. Defaults to +the number of CPUs.

+
+
+
+
+
+
+
+

PROFILES

+
+
+

Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +the reference +for more details.

+
+
+

Profile selection depends on the target and crate being built. By default the +dev or test profiles are used. If the --release flag is given, then the +release or bench profiles are used.

+
Target                          Default Profile    --release Profile
lib, bin, example               dev                release
test, bench, or any target
  in "test" or "bench" mode     test               bench

+
+

Dependencies use the dev/release profiles.

+
+
+

Unit tests are separate executable artifacts which use the test/bench +profiles. Example targets are built the same as with cargo build (using the +dev/release profiles) unless you are building them with the test harness +(by setting test = true in the manifest or using the --example flag) in +which case they use the test/bench profiles. Library targets are built +with the dev/release profiles when linked to an integration test, binary, +or doctest.
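
Profile settings may be overridden in the manifest. For example, a sketch that
optimizes code built in the test profile (other settings keep their defaults):

[profile.test]
opt-level = 2   # optimize test-profile code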

+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Execute all the unit and integration tests of the current package:

     cargo test

  2. Run only a specific test within a specific integration test:

     cargo test --test int_test_name -- modname::test_name
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-uninstall.html b/src/doc/man/generated/cargo-uninstall.html new file mode 100644 index 00000000000..ef24a2edc56 --- /dev/null +++ b/src/doc/man/generated/cargo-uninstall.html @@ -0,0 +1,183 @@ +

NAME

+
+

cargo-uninstall - Remove a Rust binary

+
+
+

SYNOPSIS

+
+
+

cargo uninstall [OPTIONS] [SPEC…​]

+
+
+
+
+

DESCRIPTION

+
+
+

This command removes a package installed with cargo-install(1). The SPEC +argument is a package ID specification of the package to remove (see +cargo-pkgid(1)).

+
+
+

By default all binaries are removed for a crate, but the --bin and +--example flags can be used to remove only particular binaries.
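
For example, if the ripgrep package installed a binary named rg, just that
binary could be removed with:

cargo uninstall ripgrep --bin rg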

+
+
+

The installation root is determined, in order of precedence:

+
+
+
  • --root option
  • CARGO_INSTALL_ROOT environment variable
  • install.root Cargo config value
  • CARGO_HOME environment variable
  • $HOME/.cargo
+
+
+
+
+

OPTIONS

+
+
+

Install Options

+
+
+
-p
+
--package SPEC…​
+
+

Package to uninstall.

+
+
--bin NAME…​
+
+

Only uninstall the binary NAME.

+
+
--root DIR
+
+

Directory to uninstall packages from.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the +terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Uninstall a previously installed package:

     cargo uninstall ripgrep
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-update.html b/src/doc/man/generated/cargo-update.html new file mode 100644 index 00000000000..496ddda42e0 --- /dev/null +++ b/src/doc/man/generated/cargo-update.html @@ -0,0 +1,233 @@ +

NAME

+
+

cargo-update - Update dependencies as recorded in the local lock file

+
+
+

SYNOPSIS

+
+
+

cargo update [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will update dependencies in the Cargo.lock file to the latest +version. It requires that the Cargo.lock file already exists as generated +by commands such as cargo-build(1) or cargo-generate-lockfile(1).

+
+
+
+
+

OPTIONS

+
+
+

Update Options

+
+
+
-p SPEC…​
+
--package SPEC…​
+
+

Update only the specified packages. This flag may be specified +multiple times. See cargo-pkgid(1) for the SPEC format.

+
+

If packages are specified with the -p flag, then a conservative update of +the lockfile will be performed. This means that only the dependency specified +by SPEC will be updated. Its transitive dependencies will be updated only if +SPEC cannot be updated without updating dependencies. All other dependencies +will remain locked at their currently recorded versions.

+
+
+

If -p is not specified, all dependencies are updated.

+
+
+
--aggressive
+
+

When used with -p, dependencies of SPEC are forced to update as well. +Cannot be used with --precise.

+
+
--precise PRECISE
+
+

When used with -p, allows you to specify a specific version number to +set the package to. If the package comes from a git repository, this can +be a git revision (such as a SHA hash or tag).
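
For example, pinning one dependency to an exact registry version and another
to a git revision (the package names and revision are hypothetical):

cargo update -p foo --precise 1.2.3
cargo update -p bar --precise abc1234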

+
+
--dry-run
+
+

Displays what would be updated, but doesn’t actually write the lockfile.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the +terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Update all dependencies in the lockfile:

     cargo update

  2. Update only specific dependencies:

     cargo update -p foo -p bar

  3. Set a specific dependency to a specific version:

     cargo update -p foo --precise 1.2.3
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-vendor.html b/src/doc/man/generated/cargo-vendor.html new file mode 100644 index 00000000000..95927267455 --- /dev/null +++ b/src/doc/man/generated/cargo-vendor.html @@ -0,0 +1,224 @@ +

NAME

+
+

cargo-vendor - Vendor all dependencies locally

+
+
+

SYNOPSIS

+
+
+

cargo vendor [OPTIONS] [PATH]

+
+
+
+
+

DESCRIPTION

+
+
+

This cargo subcommand will vendor all crates.io and git dependencies for a +project into the specified directory at <path>. After this command completes, +the vendor directory specified by <path> will contain all remote sources from +the specified dependencies. Additional manifests beyond the default one can be +specified with the -s option.

+
+
+

The cargo vendor command will also print out the configuration necessary +to use the vendored sources, which you will need to add to .cargo/config.
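
The printed configuration typically resembles the following sketch (the exact
output may differ):

[source.crates-io]
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "vendor"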

+
+
+
+
+

OPTIONS

+
+
+

Vendor Options

+
+
+
-s MANIFEST
+
--sync MANIFEST
+
+

Specify extra Cargo.toml manifests of workspaces which should also be +vendored and synced to the output.

+
+
--no-delete
+
+

Don’t delete the "vendor" directory when vendoring, but rather keep all +existing contents of the vendor directory.

+
+
--respect-source-config
+
+

Instead of ignoring the [source] configuration in .cargo/config (the default), +read it and use it when downloading crates, for example from crates.io.

+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the +terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Vendor all dependencies into a local "vendor" folder:

     cargo vendor

  2. Vendor all dependencies into a local "third-party/vendor" folder:

     cargo vendor third-party/vendor

  3. Vendor the current workspace as well as another workspace to "vendor":

     cargo vendor -s ../path/to/Cargo.toml
+
+
+
+
+

SEE ALSO

+
+ +
+
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-verify-project.html b/src/doc/man/generated/cargo-verify-project.html new file mode 100644 index 00000000000..9c5dc876e2e --- /dev/null +++ b/src/doc/man/generated/cargo-verify-project.html @@ -0,0 +1,191 @@ +

NAME

+
+

cargo-verify-project - Check correctness of crate manifest

+
+
+

SYNOPSIS

+
+
+

cargo verify-project [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

This command will parse the local manifest and check its validity. It emits a +JSON object with the result. A successful validation will display:

+
+
+
+
{"success":"true"}
+
+
+
+

An invalid workspace will display:

+
+
+
+
{"invalid":"human-readable error message"}
+
+
+
+
+
+

OPTIONS

+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the +terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--manifest-path PATH
+
+

Path to the Cargo.toml file. By default, Cargo searches in the current +directory or any parent directory for the Cargo.toml file.

+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

The workspace is OK.

+
+
1
+
+

The workspace is invalid.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Check the current workspace for errors:

     cargo verify-project
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-version.html b/src/doc/man/generated/cargo-version.html new file mode 100644 index 00000000000..2c84a33d0c6 --- /dev/null +++ b/src/doc/man/generated/cargo-version.html @@ -0,0 +1,76 @@ +

NAME

+
+

cargo-version - Show version information

+
+
+

SYNOPSIS

+
+
+

cargo version [OPTIONS]

+
+
+
+
+

DESCRIPTION

+
+
+

Displays the version of Cargo.

+
+
+
+
+

OPTIONS

+
+
+
+
-v
+
--verbose
+
+

Display additional version information.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Display the version:

     cargo version

  2. The version is also available via flags:

     cargo --version
     cargo -V

  3. Display extra version information:

     cargo -Vv
+
+
+
+
+

SEE ALSO

+
+ +
+
\ No newline at end of file diff --git a/src/doc/man/generated/cargo-yank.html b/src/doc/man/generated/cargo-yank.html new file mode 100644 index 00000000000..ba6c1ab8fed --- /dev/null +++ b/src/doc/man/generated/cargo-yank.html @@ -0,0 +1,188 @@ +

NAME

+
+

cargo-yank - Remove a pushed crate from the index

+
+
+

SYNOPSIS

+
+
+

cargo yank [OPTIONS] --vers VERSION [CRATE]

+
+
+
+
+

DESCRIPTION

+
+
+

The yank command removes a previously published crate’s version from the +server’s index. This command does not delete any data, and the crate will +still be available for download via the registry’s download link.

+
+
+

Note that existing crates locked to a yanked version will still be able to +download the yanked version to use it. Cargo will, however, not allow any new +crates to be locked to any yanked version.

+
+
+

This command requires you to be authenticated with either the --token option +or using cargo-login(1).

+
+
+

If the crate name is not specified, it will use the package name from the +current directory.

+
+
+
+
+

OPTIONS

+
+
+

Yank Options

+
+
+
--vers VERSION
+
+

The version to yank or un-yank.

+
+
--undo
+
+

Undo a yank, putting a version back into the index.
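
For example, to put a previously yanked version back into the index:

cargo yank --vers 1.0.7 --undo foo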

+
+
--token TOKEN
+
+

API token to use when authenticating. This overrides the token stored in +the credentials file (which is created by cargo-login(1)).

+
+

Cargo config environment variables can be +used to override the tokens stored in the credentials file. The token for +crates.io may be specified with the CARGO_REGISTRY_TOKEN environment +variable. Tokens for other registries may be specified with environment +variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name +of the registry in all capital letters.
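
For example, for a registry named internal (a hypothetical name), the token
could be supplied as:

CARGO_REGISTRIES_INTERNAL_TOKEN=abc123 cargo yank --vers 1.0.7 --registry internal foo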

+
+
+
--index INDEX
+
+

The URL of the registry index to use.

+
+
--registry REGISTRY
+
+

Name of the registry to use. Registry names are defined in Cargo config files. +If not specified, the default registry is used, which is defined by the +registry.default config key which defaults to crates-io.

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the +terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Yank a crate from the index:

     cargo yank --vers 1.0.7 foo
+
+
+
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/generated/cargo.html b/src/doc/man/generated/cargo.html new file mode 100644 index 00000000000..c501206d302 --- /dev/null +++ b/src/doc/man/generated/cargo.html @@ -0,0 +1,450 @@ +

NAME

+
+

cargo - The Rust package manager

+
+
+

SYNOPSIS

+
+
+

cargo [OPTIONS] COMMAND [ARGS]
+cargo [OPTIONS] --version
+cargo [OPTIONS] --list
+cargo [OPTIONS] --help
+cargo [OPTIONS] --explain CODE

+
+
+
+
+

DESCRIPTION

+
+
+

This program is a package manager and build tool for the Rust language, +available at https://rust-lang.org.

+
+
+
+
+

COMMANDS

+
+
+

Build Commands

+
+
+
cargo-bench(1)
+
+

Execute benchmarks of a package.

+
+
cargo-build(1)
+
+

Compile a package.

+
+
cargo-check(1)
+
+

Check a local package and all of its dependencies for errors.

+
+
cargo-clean(1)
+
+

Remove artifacts that Cargo has generated in the past.

+
+
cargo-doc(1)
+
+

Build a package’s documentation.

+
+
cargo-fetch(1)
+
+

Fetch dependencies of a package from the network.

+
+
cargo-fix(1)
+
+

Automatically fix lint warnings reported by rustc.

+
+
cargo-run(1)
+
+

Run a binary or example of the local package.

+
+
cargo-rustc(1)
+
+

Compile a package, and pass extra options to the compiler.

+
+
cargo-rustdoc(1)
+
+

Build a package’s documentation, using specified custom flags.

+
+
cargo-test(1)
+
+

Execute unit and integration tests of a package.

+
+
+
+
+
+

Manifest Commands

+
+
+
cargo-generate-lockfile(1)
+
+

Generate Cargo.lock for a project.

+
+
cargo-locate-project(1)
+
+

Print a JSON representation of a Cargo.toml file’s location.

+
+
cargo-metadata(1)
+
+

Output the resolved dependencies of a package, with the concrete versions +used (including overrides), in machine-readable format.

+
+
cargo-pkgid(1)
+
+

Print a fully qualified package specification.

+
+
cargo-update(1)
+
+

Update dependencies as recorded in the local lock file.

+
+
cargo-verify-project(1)
+
+

Check correctness of crate manifest.

+
+
+
+
+
+

Package Commands

+
+
+
cargo-init(1)
+
+

Create a new Cargo package in an existing directory.

+
+
cargo-install(1)
+
+

Build and install a Rust binary.

+
+
cargo-new(1)
+
+

Create a new Cargo package.

+
+
cargo-search(1)
+
+

Search packages in crates.io.

+
+
cargo-uninstall(1)
+
+

Remove a Rust binary.

+
+
+
+
+
+

Publishing Commands

+
+
+
cargo-login(1)
+
+

Save an API token from the registry locally.

+
+
cargo-owner(1)
+
+

Manage the owners of a crate on the registry.

+
+
cargo-package(1)
+
+

Assemble the local package into a distributable tarball.

+
+
cargo-publish(1)
+
+

Upload a package to the registry.

+
+
cargo-yank(1)
+
+

Remove a pushed crate from the index.

+
+
+
+
+
+

General Commands

+
+
+
cargo-help(1)
+
+

Display help information about Cargo.

+
+
cargo-version(1)
+
+

Show version information.

+
+
+
+
+
+
+
+

OPTIONS

+
+
+

Special Options

+
+
+
-V
+
--version
+
+

Print version info and exit. If used with --verbose, prints extra +information.

+
+
--list
+
+

List all installed Cargo subcommands. If used with --verbose, prints +extra information.

+
+
--explain CODE
+
+

Run rustc --explain CODE which will print out a detailed explanation of +an error message (for example, E0004).

+
+
+
+
+
+

Display Options

+
+
+
-v
+
--verbose
+
+

Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the term.verbose +config value.

+
+
-q
+
--quiet
+
+

No output printed to stdout.

+
+
--color WHEN
+
+

Control when colored output is used. Valid values:

+
+
  • auto (default): Automatically detect if color support is available on the +terminal.
  • always: Always display colors.
  • never: Never display colors.
+
+
+

May also be specified with the term.color +config value.

+
+
+
+
+
+
+

Manifest Options

+
+
+
--frozen
+
--locked
+
+

Either of these flags requires that the Cargo.lock file is +up-to-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The --frozen flag also prevents Cargo from +attempting to access the network to determine if it is out-of-date.

+
+

These may be used in environments where you want to assert that the +Cargo.lock file is up-to-date (such as a CI build) or want to avoid network +access.

+
+
+
--offline
+
+

Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible.

+
+

Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the cargo-fetch(1) command to download dependencies before going +offline.

+
+
+

May also be specified with the net.offline config value.

+
+
+
+
+
+
+

Common Options

+
+
+
-h
+
--help
+
+

Prints help information.

+
+
-Z FLAG…​
+
+

Unstable (nightly-only) flags to Cargo. Run cargo -Z help for +details.

+
+
+
+
+
+
+
+

ENVIRONMENT

+
+
+

See the reference for +details on environment variables that Cargo reads.

+
+
+
+
+

Exit Status

+
+
+
+
0
+
+

Cargo succeeded.

+
+
101
+
+

Cargo failed to complete.

+
+
+
+
+
+
+

FILES

+
+
+
+
~/.cargo/
+
+

Default location for Cargo’s "home" directory where it stores various +files. The location can be changed with the CARGO_HOME environment +variable.

+
+
$CARGO_HOME/bin/
+
+

Binaries installed by cargo-install(1) will be located here. If using +rustup, executables distributed with Rust are also located here.

+
+
$CARGO_HOME/config
+
+

The global configuration file. See the reference +for more information about configuration files.

+
+
.cargo/config
+
+

Cargo automatically searches for a file named .cargo/config in the +current directory, and all parent directories. These configuration files +will be merged with the global configuration file.

+
+
$CARGO_HOME/credentials
+
+

Private authentication information for logging in to a registry.

+
+
$CARGO_HOME/registry/
+
+

This directory contains cached downloads of the registry index and any +downloaded dependencies.

+
+
$CARGO_HOME/git/
+
+

This directory contains cached downloads of git dependencies.

+
+
+
+
+
+
+

EXAMPLES

+
+
+
  1. Build a local package and all of its dependencies:

     cargo build

  2. Build a package with optimizations:

     cargo build --release

  3. Run tests for a cross-compiled target:

     cargo test --target i686-unknown-linux-gnu

  4. Create a new package that builds an executable:

     cargo new foobar

  5. Create a package in the current directory:

     mkdir foo && cd foo
     cargo init .

  6. Learn about a command’s options and usage:

     cargo help clean
+
+
+
+
+

BUGS

+ +
+
+

SEE ALSO

+ +
\ No newline at end of file diff --git a/src/doc/man/options-common.adoc b/src/doc/man/options-common.adoc new file mode 100644 index 00000000000..b8c2fce380c --- /dev/null +++ b/src/doc/man/options-common.adoc @@ -0,0 +1,7 @@ +*-h*:: +*--help*:: + Prints help information. + +*-Z* _FLAG_...:: + Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for + details. diff --git a/src/doc/man/options-display.adoc b/src/doc/man/options-display.adoc new file mode 100644 index 00000000000..cc2e2263398 --- /dev/null +++ b/src/doc/man/options-display.adoc @@ -0,0 +1,22 @@ +*-v*:: +*--verbose*:: + Use verbose output. May be specified twice for "very verbose" output which + includes extra output such as dependency warnings and build script output. + May also be specified with the `term.verbose` + linkcargo:reference/config.html[config value]. + +*-q*:: +*--quiet*:: + No output printed to stdout. + +*--color* _WHEN_:: + Control when colored output is used. Valid values: ++ +- `auto` (default): Automatically detect if color support is available on the + terminal. +- `always`: Always display colors. +- `never`: Never display colors. + ++ +May also be specified with the `term.color` +linkcargo:reference/config.html[config value]. diff --git a/src/doc/man/options-features.adoc b/src/doc/man/options-features.adoc new file mode 100644 index 00000000000..666ef21e4ec --- /dev/null +++ b/src/doc/man/options-features.adoc @@ -0,0 +1,16 @@ +=== Feature Selection + +When no feature options are given, the `default` feature is activated for +every selected package. + +*--features* _FEATURES_:: + Space or comma separated list of features to activate. These features only + apply to the current directory's package. Features of direct dependencies + may be enabled with `/` syntax. + +*--all-features*:: + Activate all available features of all selected packages. + +*--no-default-features*:: + Do not activate the `default` feature of the current directory's + package. diff --git a/src/doc/man/options-index.adoc b/src/doc/man/options-index.adoc new file mode 100644 index 00000000000..1321866bae7 --- /dev/null +++ b/src/doc/man/options-index.adoc @@ -0,0 +1,2 @@ +*--index* _INDEX_:: + The URL of the registry index to use. diff --git a/src/doc/man/options-jobs.adoc b/src/doc/man/options-jobs.adoc new file mode 100644 index 00000000000..9d817426ba1 --- /dev/null +++ b/src/doc/man/options-jobs.adoc @@ -0,0 +1,5 @@ +*-j* _N_:: +*--jobs* _N_:: + Number of parallel jobs to run. May also be specified with the + `build.jobs` linkcargo:reference/config.html[config value]. Defaults to + the number of CPUs. diff --git a/src/doc/man/options-locked.adoc b/src/doc/man/options-locked.adoc new file mode 100644 index 00000000000..45bbfa5117d --- /dev/null +++ b/src/doc/man/options-locked.adoc @@ -0,0 +1,24 @@ +*--frozen*:: +*--locked*:: + Either of these flags requires that the `Cargo.lock` file is + up-to-date. If the lock file is missing, or it needs to be updated, Cargo will + exit with an error. The `--frozen` flag also prevents Cargo from + attempting to access the network to determine if it is out-of-date. ++ +These may be used in environments where you want to assert that the +`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network +access. + +*--offline*:: + Prevents Cargo from accessing the network for any reason. Without this + flag, Cargo will stop with an error if it needs to access the network and + the network is not available. 
With this flag, Cargo will attempt to + proceed without the network if possible. ++ +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the man:cargo-fetch[1] command to download dependencies before going +offline. ++ +May also be specified with the `net.offline` linkcargo:reference/config.html[config value]. diff --git a/src/doc/man/options-manifest-path.adoc b/src/doc/man/options-manifest-path.adoc new file mode 100644 index 00000000000..1bc4d80eb94 --- /dev/null +++ b/src/doc/man/options-manifest-path.adoc @@ -0,0 +1,3 @@ +*--manifest-path* _PATH_:: + Path to the `Cargo.toml` file. By default, Cargo searches in the current + directory or any parent directory for the `Cargo.toml` file. diff --git a/src/doc/man/options-message-format.adoc b/src/doc/man/options-message-format.adoc new file mode 100644 index 00000000000..6da9c26bd58 --- /dev/null +++ b/src/doc/man/options-message-format.adoc @@ -0,0 +1,6 @@ +*--message-format* _FMT_:: + The output format for diagnostic messages. Valid values: ++ +- `human` (default): Display in a human-readable text format. +- `json`: Emit JSON messages to stdout. +- `short`: Emit shorter, human-readable text messages. diff --git a/src/doc/man/options-new.adoc b/src/doc/man/options-new.adoc new file mode 100644 index 00000000000..2218599ae2a --- /dev/null +++ b/src/doc/man/options-new.adoc @@ -0,0 +1,29 @@ +*--bin*:: + Create a package with a binary target (`src/main.rs`). + This is the default behavior. + +*--lib*:: + Create a package with a library target (`src/lib.rs`). + +*--edition* _EDITION_:: + Specify the Rust edition to use. Default is 2018. + Possible values: 2015, 2018 + +*--name* _NAME_:: + Set the package name. Defaults to the directory name. + +*--vcs* _VCS_:: + Initialize a new VCS repository for the given version control system (git, + hg, pijul, or fossil) or do not initialize any version control at all + (none). If not specified, defaults to `git` or the configuration value + `cargo-new.vcs`, or `none` if already inside a VCS repository. + +*--registry* _REGISTRY_:: + This sets the `publish` field in `Cargo.toml` to the given registry name + which will restrict publishing only to that registry. ++ +Registry names are defined in linkcargo:reference/config.html[Cargo config files]. +If not specified, the default registry defined by the `registry.default` +config key is used. If the default registry is not set and `--registry` is not +used, the `publish` field will not be set which means that publishing will not +be restricted. diff --git a/src/doc/man/options-package.adoc b/src/doc/man/options-package.adoc new file mode 100644 index 00000000000..c0cfbc35eef --- /dev/null +++ b/src/doc/man/options-package.adoc @@ -0,0 +1,7 @@ +By default, the package in the current working directory is selected. The `-p` +flag can be used to choose a different package in a workspace. + +*-p* _SPEC_:: +*--package* _SPEC_:: + The package to convert:lowercase[{actionverb}]. See man:cargo-pkgid[1] for + the SPEC format. diff --git a/src/doc/man/options-packages.adoc b/src/doc/man/options-packages.adoc new file mode 100644 index 00000000000..51ab9f0c370 --- /dev/null +++ b/src/doc/man/options-packages.adoc @@ -0,0 +1,18 @@ +By default, when no package selection options are given, the packages selected +depend on the current working directory. 
In the root of a virtual workspace, +all workspace members are selected (`--all` is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the `workspace.default-members` key in the root `Cargo.toml` +manifest. + +*-p* _SPEC_...:: +*--package* _SPEC_...:: + {actionverb} only the specified packages. See man:cargo-pkgid[1] for the + SPEC format. This flag may be specified multiple times. + +*--all*:: + {actionverb} all members in the workspace. + +*--exclude* _SPEC_...:: + Exclude the specified packages. Must be used in conjunction with the + `--all` flag. This flag may be specified multiple times. diff --git a/src/doc/man/options-profile.adoc b/src/doc/man/options-profile.adoc new file mode 100644 index 00000000000..3c5ad14c764 --- /dev/null +++ b/src/doc/man/options-profile.adoc @@ -0,0 +1,6 @@ +*--profile* _NAME_:: + Changes convert:lowercase[{actionverb}] behavior. Currently only `test` is + supported, which will convert:lowercase[{actionverb}] with the + `#[cfg(test)]` attribute enabled. This is useful to have it + convert:lowercase[{actionverb}] unit tests which are usually excluded via + the `cfg` attribute. This does not change the actual profile used. diff --git a/src/doc/man/options-registry.adoc b/src/doc/man/options-registry.adoc new file mode 100644 index 00000000000..a0c4c27c8e8 --- /dev/null +++ b/src/doc/man/options-registry.adoc @@ -0,0 +1,4 @@ +*--registry* _REGISTRY_:: + Name of the registry to use. Registry names are defined in linkcargo:reference/config.html[Cargo config files]. + If not specified, the default registry is used, which is defined by the + `registry.default` config key which defaults to `crates-io`. diff --git a/src/doc/man/options-release.adoc b/src/doc/man/options-release.adoc new file mode 100644 index 00000000000..e99539172a3 --- /dev/null +++ b/src/doc/man/options-release.adoc @@ -0,0 +1,3 @@ +*--release*:: + {actionverb} optimized artifacts with the `release` profile. See the + <> section for details on how this affects profile selection. diff --git a/src/doc/man/options-target-dir.adoc b/src/doc/man/options-target-dir.adoc new file mode 100644 index 00000000000..f044bd71223 --- /dev/null +++ b/src/doc/man/options-target-dir.adoc @@ -0,0 +1,5 @@ +*--target-dir* _DIRECTORY_:: + Directory for all generated artifacts and intermediate files. May also be + specified with the `CARGO_TARGET_DIR` environment variable, or the + `build.target-dir` linkcargo:reference/config.html[config value]. Defaults + to `target` in the root of the workspace. diff --git a/src/doc/man/options-target-triple.adoc b/src/doc/man/options-target-triple.adoc new file mode 100644 index 00000000000..eac97d88505 --- /dev/null +++ b/src/doc/man/options-target-triple.adoc @@ -0,0 +1,8 @@ +*--target* _TRIPLE_:: + {actionverb} for the given architecture. The default is the host + architecture. The general format of the triple is + `---`. Run `rustc --print target-list` for a + list of supported targets. ++ +This may also be specified with the `build.target` +linkcargo:reference/config.html[config value]. diff --git a/src/doc/man/options-targets-lib-bin.adoc b/src/doc/man/options-targets-lib-bin.adoc new file mode 100644 index 00000000000..8668ba84ba9 --- /dev/null +++ b/src/doc/man/options-targets-lib-bin.adoc @@ -0,0 +1,8 @@ +*--lib*:: + {actionverb} the package's library. + +*--bin* _NAME_...:: + {actionverb} the specified binary. This flag may be specified multiple times. 
+ +*--bins*:: + {actionverb} all binary targets. diff --git a/src/doc/man/options-targets.adoc b/src/doc/man/options-targets.adoc new file mode 100644 index 00000000000..6a8a46cd714 --- /dev/null +++ b/src/doc/man/options-targets.adoc @@ -0,0 +1,39 @@ +Passing target selection flags will convert:lowercase[{actionverb}] only the +specified targets. + +include::options-targets-lib-bin.adoc[] + +*--example* _NAME_...:: + {actionverb} the specified example. This flag may be specified multiple times. + +*--examples*:: + {actionverb} all example targets. + +*--test* _NAME_...:: + {actionverb} the specified integration test. This flag may be specified multiple + times. + +*--tests*:: + {actionverb} all targets in test mode that have the `test = true` manifest + flag set. By default this includes the library and binaries built as + unittests, and integration tests. Be aware that this will also build any + required dependencies, so the lib target may be built twice (once as a + unittest, and once as a dependency for binaries, integration tests, etc.). + Targets may be enabled or disabled by setting the `test` flag in the + manifest settings for the target. + +*--bench* _NAME_...:: + {actionverb} the specified benchmark. This flag may be specified multiple times. + +*--benches*:: + {actionverb} all targets in benchmark mode that have the `bench = true` + manifest flag set. By default this includes the library and binaries built + as benchmarks, and bench targets. Be aware that this will also build any + required dependencies, so the lib target may be built twice (once as a + benchmark, and once as a dependency for binaries, benchmarks, etc.). + Targets may be enabled or disabled by setting the `bench` flag in the + manifest settings for the target. + +*--all-targets*:: + {actionverb} all targets. This is equivalent to specifying `--lib --bins + --tests --benches --examples`. diff --git a/src/doc/man/options-test.adoc b/src/doc/man/options-test.adoc new file mode 100644 index 00000000000..0cdcb3d7efc --- /dev/null +++ b/src/doc/man/options-test.adoc @@ -0,0 +1,8 @@ +*--no-run*:: + Compile, but don't run {nouns}. + +*--no-fail-fast*:: + Run all {nouns} regardless of failure. Without this flag, Cargo will exit + after the first executable fails. The Rust test harness will run all + {nouns} within the executable to completion, this flag only applies to + the executable as a whole. diff --git a/src/doc/man/options-token.adoc b/src/doc/man/options-token.adoc new file mode 100644 index 00000000000..5f25ffbf243 --- /dev/null +++ b/src/doc/man/options-token.adoc @@ -0,0 +1,10 @@ +*--token* _TOKEN_:: + API token to use when authenticating. This overrides the token stored in + the credentials file (which is created by man:cargo-login[1]). ++ +linkcargo:reference/config.html[Cargo config] environment variables can be +used to override the tokens stored in the credentials file. The token for +crates.io may be specified with the `CARGO_REGISTRY_TOKEN` environment +variable. Tokens for other registries may be specified with environment +variables of the form `CARGO_REGISTRIES_NAME_TOKEN` where `NAME` is the name +of the registry in all capital letters. diff --git a/src/doc/man/section-environment.adoc b/src/doc/man/section-environment.adoc new file mode 100644 index 00000000000..5cc69e995a0 --- /dev/null +++ b/src/doc/man/section-environment.adoc @@ -0,0 +1,4 @@ +== ENVIRONMENT + +See linkcargo:reference/environment-variables.html[the reference] for +details on environment variables that Cargo reads. 
diff --git a/src/doc/man/section-exit-status.adoc b/src/doc/man/section-exit-status.adoc new file mode 100644 index 00000000000..427b3903c19 --- /dev/null +++ b/src/doc/man/section-exit-status.adoc @@ -0,0 +1,7 @@ +== Exit Status + +0:: + Cargo succeeded. + +101:: + Cargo failed to complete. diff --git a/src/doc/man/section-profiles.adoc b/src/doc/man/section-profiles.adoc new file mode 100644 index 00000000000..02cf4acaa0b --- /dev/null +++ b/src/doc/man/section-profiles.adoc @@ -0,0 +1,26 @@ +== PROFILES + +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +linkcargo:reference/manifest.html#the-profile-sections[the reference] +for more details. + +Profile selection depends on the target and crate being built. By default the +`dev` or `test` profiles are used. If the `--release` flag is given, then the +`release` or `bench` profiles are used. + +[%autowidth] +|=== +|Target |Default Profile |`--release` Profile + +|lib, bin, example +|`dev` +|`release` + +|test, bench, or any target + + in "test" or "bench" mode +|`test` +|`bench` +|=== + +Dependencies use the `dev`/`release` profiles. diff --git a/src/doc/manifest.md b/src/doc/manifest.md deleted file mode 100644 index 8b5d02e46b6..00000000000 --- a/src/doc/manifest.md +++ /dev/null @@ -1,511 +0,0 @@ -% The Manifest Format - Cargo Documentation - -# The `[package]` Section - -The first section in a `Cargo.toml` is `[package]`. - -```toml -[package] -name = "hello_world" # the name of the package -version = "0.1.0" # the current version, obeying semver -authors = ["you@example.com"] -``` - -All three of these fields are mandatory. Cargo bakes in the concept of -[Semantic Versioning](http://semver.org/), so make sure you follow some -basic rules: - -* Before you reach 1.0.0, anything goes. -* After 1.0.0, only make breaking changes when you increment the major - version. In Rust, breaking changes include adding fields to structs or - variants to enums. Don't break the build. -* After 1.0.0, don't add any new public API (no new `pub` anything) in - tiny versions. Always increment the minor version if you add any new - `pub` structs, traits, fields, types, functions, methods or anything else. -* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0. - -For more on versions, see [this -documentation](crates-io.html#using-crates.io-based-crates). - -## The `build` Field (optional) - -This field specifies a file in the repository which is a [build -script][1] for building native code. More information can be -found in the build script [guide][1]. - -[1]: build-script.html - -```toml -[package] -# ... -build = "build.rs" -``` - -## The `exclude` and `include` Fields (optional) - -You can explicitly specify to Cargo that a set of [globs][globs] should be ignored or -included for the purposes of packaging and rebuilding a package. The globs -specified in the `exclude` field identify a set of files that are not included -when a package is published as well as ignored for the purposes of detecting -when to rebuild a package, and the globs in `include` specify files that are -explicitly included. - -If a VCS is being used for a package, the `exclude` field will be seeded with -the VCS's ignore settings (`.gitignore` for git for example). - -```toml -[package] -# ... -exclude = ["build/**/*.o", "doc/**/*.html"] -``` - -```toml -[package] -# ... -include = ["src/**/*", "Cargo.toml"] -``` - -The options are mutually exclusive: setting `include` will override an -`exclude`. 
Note that `include` must be an exhaustive list of files as otherwise -necessary source files may not be included. - -[globs]: http://doc.rust-lang.org/glob/glob/struct.Pattern.html - -## Package metadata - -There are a number of optional metadata fields also accepted under the -`[package]` section: - -```toml -[package] -# ... - -# A short blurb about the package. This is not rendered in any format when -# uploaded to crates.io (aka this is not markdown) -description = "..." - -# These URLs point to more information about the repository -documentation = "..." -homepage = "..." -repository = "..." - -# This points to a file in the repository (relative to this Cargo.toml). The -# contents of this file are stored and indexed in the registry. -readme = "..." - -# This is a small list of keywords used to categorize and search for this -# package. -keywords = ["...", "..."] - -# This is a string description of the license for this package. Currently -# crates.io will validate the license provided against a whitelist of known -# license identifiers from http://spdx.org/licenses/. Multiple licenses can -# be separated with a `/` -license = "..." - -# If a project is using a nonstandard license, then this key may be specified in -# lieu of the above key and must point to a file relative to this manifest -# (similar to the readme key) -license-file = "..." -``` - -The [crates.io](https://crates.io) registry will render the description, display -the license, link to the three URLs and categorize by the keywords. These keys -provide useful information to users of the registry and also influence the -search ranking of a crate. It is highly discouraged to omit everything in a -published crate. - - -# The `[dependencies.*]` Sections - -You list dependencies using `[dependencies.]`. For example, if you -wanted to depend on both `hammer` and `color`: - -```toml -[package] -# ... - -[dependencies.hammer] -version = "0.5.0" # optional -git = "https://github.com/wycats/hammer.rs" - -[dependencies.color] -git = "https://github.com/bjz/color-rs" - -[dependencies.geometry] -path = "crates/geometry" -``` - -You may prefer to use TOML's inline table syntax: - -```toml -[dependencies] -hammer = { version = "0.5.0", git = "https://github.com/wycats/hammer.rs" } -color = { git = "https://github.com/bjz/color-rs" } -geometry = { path = "crates/geometry" } -``` - -You can specify the source of a dependency in one of two ways at the moment: - -* `git = ""`: A git repository with a `Cargo.toml` in its root. The - `rev`, `tag`, and `branch` options are also recognized to use something other - than the `master` branch. -* `path = ""`: A path relative to the current `Cargo.toml` - with a `Cargo.toml` in its root. - -Dependencies from crates.io are not declared with separate sections: - -```toml -[dependencies] -hammer = "0.5.0" -color = "0.6.0" -``` - -The syntax of the requirement strings is described in the [crates.io guide](crates-io.html#using-crates.io-based-crates). 
- -Platform-specific dependencies take the same format, but are listed under the -`target.$triple` section: - -```toml -[target.x86_64-pc-windows-gnu.dependencies] -winhttp = "0.4.0" - -[target.i686-unknown-linux-gnu.dependencies] -openssl = "1.0.1" -native = { path = "native/i686" } - -[target.x86_64-unknown-linux-gnu.dependencies] -openssl = "1.0.1" -native = { path = "native/x86_64" } -``` - -If you're using a target file, quote the full path and file name: - -```toml -[target."x86_64/windows.json".dependencies] -winhttp = "0.4.0" - -[target."i686/linux.json".dependencies] -openssl = "1.0.1" -native = { path = "native/i686" } - -[target."x86_64/linux.json".dependencies] -openssl = "1.0.1" -native = { path = "native/x86_64" } -``` - -# The `[profile.*]` Sections - -Cargo supports custom configuration of how rustc is invoked through **profiles** -at the top level. Any manifest may declare a profile, but only the **top level** -project's profiles are actually read. All dependencies' profiles will be -overridden. This is done so the top-level project has control over how its -dependencies are compiled. - -There are five currently supported profile names, all of which have the same -configuration available to them. Listed below is the configuration available, -along with the defaults for each profile. - -```toml -# The development profile, used for `cargo build` -[profile.dev] -opt-level = 0 # Controls the --opt-level the compiler builds with -debug = true # Controls whether the compiler passes `-g` -rpath = false # Controls whether the compiler passes `-C rpath` -lto = false # Controls `-C lto` for binaries and staticlibs -debug-assertions = true # Controls whether debug assertions are enabled -codegen-units = 1 # Controls whether the compiler passes `-C codegen-units` - # `codegen-units` is ignored when `lto = true` - -# The release profile, used for `cargo build --release` -[profile.release] -opt-level = 3 -debug = false -rpath = false -lto = false -debug-assertions = false -codegen-units = 1 - -# The testing profile, used for `cargo test` -[profile.test] -opt-level = 0 -debug = true -rpath = false -lto = false -debug-assertions = true -codegen-units = 1 - -# The benchmarking profile, used for `cargo bench` -[profile.bench] -opt-level = 3 -debug = false -rpath = false -lto = false -debug-assertions = false -codegen-units = 1 - -# The documentation profile, used for `cargo doc` -[profile.doc] -opt-level = 0 -debug = true -rpath = false -lto = false -debug-assertions = true -codegen-units = 1 -``` - -# The `[features]` Section - -Cargo supports **features** to allow expression of: - -* Optional dependencies, which enhance a package, but are not required -* Clusters of optional dependencies, such as "postgres", that would include the - `postgres` package, the `postgres-macros` package, and possibly other packages - (such as development-time mocking libraries, debugging tools, etc.) - -A feature of a package is either an optional dependency, or a set of other -features. The format for specifying features is: - -```toml -[package] -name = "awesome" - -[features] -# The "default" set of optional packages. Most people will -# want to use these packages, but they are strictly optional. -# Note that `session` is not a package but rather another -# feature listed in this manifest. -default = ["jquery", "uglifier", "session"] - -# The "secure-password" feature depends on the bcrypt package. 
-# This aliasing will allow people to talk about the feature in -# a higher-level way and allow this package to add more -# requirements to the feature in the future. -secure-password = ["bcrypt"] - -# Features can be used to reexport features of other packages. -# The `session` feature of package `awesome` will ensure that the -# `session` feature of the package `cookie` is also enabled. -session = ["cookie/session"] - -[dependencies] -# These packages are mandatory and form the core of this -# package's distribution -cookie = "1.2.0" -oauth = "1.1.0" -route-recognizer = "=2.1.0" - -# A list of all of the optional dependencies, some of which -# are included in the above "features". They can be opted -# into by apps. -[dependencies.jquery] -version = "1.0.2" -optional = true - -[dependencies.uglifier] -version = "1.5.3" -optional = true - -[dependencies.bcrypt] -version = "*" -optional = true - -[dependencies.civet] -version = "*" -optional = true -``` - -To use the package `awesome`: - -```toml -[dependencies.awesome] -version = "1.3.5" -features = ["secure-password", "civet"] - -# do not include the default features, and optionally -# cherry-pick individual features -default-features = false -``` - -## Rules - -The usage of features is subject to a few rules: - -1. Feature names must not conflict with other package names in the manifest. - This is because they are opted into via `features = [...]`, which only has a - single namespace -2. With the exception of the `default` feature, all features are opt-in. To opt - out of the default feature, use `default-features = false` and cherry-pick - individual features. -3. Feature groups are not allowed to cyclicly depend on one another. -4. Dev-dependencies cannot be optional -5. Features groups can only reference optional dependencies -6. When a feature is selected, Cargo will call `rustc` with - `--cfg feature="${feature_name}"`. If a feature group is included, - it and all of its individual features will be included. This can be - tested in code via `#[cfg(feature = "foo")]` - -Note that it is explicitly allowed for features to not actually activate any -optional dependencies. This allows packages to internally enable/disable -features without requiring a new dependency. - -## Usage In End Products - -One major use-case for this feature is specifying optional features in -end-products. For example, the Servo project may want to include optional -features that people can enable or disable when they build it. - -In that case, Servo will describe features in its `Cargo.toml` and they can be -enabled using command-line flags: - -``` -$ cargo build --release --features "shumway pdf" -``` - -Default features could be excluded using `--no-default-features`. - -## Usage In Packages - -In most cases, the concept of "optional dependency" in a library is best -expressed as a separate package that the top-level application depends on. - -However, high-level packages, like Iron or Piston, may want the ability to -curate a number of packages for easy installation. The current Cargo system -allows them to curate a number of mandatory dependencies into a single package -for easy installation. - -In some cases, packages may want to provide additional curation for **optional** -dependencies: - -* Grouping a number of low-level optional dependencies together into a single - high-level "feature". -* Specifying packages that are recommended (or suggested) to be included by - users of the package. 
-* Including a feature (like `secure-password` in the motivating example) that - will only work if an optional dependency is available, and would be difficult - to implement as a separate package. For example, it may be overly difficult to - design an IO package to be completely decoupled from OpenSSL, with opt-in via - the inclusion of a separate package. - -In almost all cases, it is an antipattern to use these features outside of -high-level packages that are designed for curation. If a feature is optional, it -can almost certainly be expressed as a separate package. - -# The `[dev-dependencies.*]` Sections - -The format of this section is equivalent to `[dependencies.*]`. Dev-dependencies -are not used when compiling a package for building, but are used for compiling -tests and benchmarks. - -These dependencies are *not* propagated to other packages which depend on this -package. - -# The Project Layout - -If your project is an executable, name the main source file `src/main.rs`. -If it is a library, name the main source file `src/lib.rs`. - -Cargo will also treat any files located in `src/bin/*.rs` as -executables. - -Your project can optionally contain folders named `examples`, `tests`, and -`benches`, which Cargo will treat as containing example executable files, -integration tests, and benchmarks respectively. - -```notrust -▾ src/ # directory containing source files - lib.rs # the main entry point for libraries and packages - main.rs # the main entry point for projects producing executables - ▾ bin/ # (optional) directory containing additional executables - *.rs -▾ examples/ # (optional) examples - *.rs -▾ tests/ # (optional) integration tests - *.rs -▾ benches/ # (optional) benchmarks - *.rs -``` - -# Examples - -Files located under `examples` are example uses of the functionality -provided by the library. When compiled, they are placed in the -`target/examples` directory. - -They must compile as executables (with a `main()` function) and load in the -library by using `extern crate `. They are compiled when you run -your tests to protect them from bitrotting. - -# Tests - -When you run `cargo test`, Cargo will: - -* Compile your library's unit tests, which are in files reachable from - `lib.rs`. Any sections marked with `#[cfg(test)]` will be included. -* Compile your library’s documentation tests, which are embedded inside - of documentation blocks. -* Compile your library's integration tests, which are located in - `tests`. Files in `tests` load in your library by using `extern crate - ` like any other code that depends on it. -* Compile your library's examples. - -# Configuring a target - -All of the `[[bin]]`, `[lib]`, `[[bench]]`, and `[[test]]` sections support -similar configuration for specifying how a target should be built. The example -below uses `[lib]`, but it also applies to all other sections as well. All -values listed are the defaults for that option unless otherwise specified. - -```toml -[package] -# ... - -[lib] -# The name of a target is the name of the library that will be generated. This -# is defaulted to the name of the package or project. -name = "foo" - -# This field points at where the crate is located, relative to the Cargo.toml. -path = "src/lib.rs" - -# A flag for enabling unit tests for this target. This is used by `cargo test`. -test = true - -# A flag for enabling documentation tests for this target. This is only -# relevant for libraries, it has no effect on other sections. This is used by -# `cargo test`. 
-doctest = true - -# A flag for enabling benchmarks for this target. This is used by `cargo bench`. -bench = true - -# A flag for enabling documentation of this target. This is used by `cargo doc`. -doc = true - -# If the target is meant to be a compiler plugin, this field must be set to true -# for cargo to correctly compile it and make it available for all dependencies. -plugin = false - -# If set to false, `cargo test` will omit the --test flag to rustc, which stops -# it from generating a test harness. This is useful when the binary being built -# manages the test runner itself. -harness = true -``` - -# Building Dynamic or Static Libraries - -If your project produces a library, you can specify which kind of -library to build by explicitly listing the library in your `Cargo.toml`: - -```toml -# ... - -[lib] -name = "..." -# this could be "staticlib" as well -crate-type = ["dylib"] -``` - -The available options are `dylib`, `rlib`, and `staticlib`. You should only use -this option in a project. Cargo will always compile **packages** (dependencies) -based on the requirements of the project that includes them. diff --git a/src/doc/pkgid-spec.md b/src/doc/pkgid-spec.md deleted file mode 100644 index 30b736d6925..00000000000 --- a/src/doc/pkgid-spec.md +++ /dev/null @@ -1,44 +0,0 @@ -% Package ID Specifications - Cargo Documentation - -# Package ID Specifications - -Subcommands of cargo frequently need to refer to a particular package within a -dependency graph for various operations like updating, cleaning, building etc. -To solve this problem, cargo supports Package ID Specifications. A specification -is a string which is used to uniquely refer to one package within a graph of -packages. - -## Specification grammar - -The formal grammar for a Package Id Specification is: - -```notrust -pkgid := pkgname - | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ] -pkgname := name [ ":" semver ] - -proto := "http" | "git" | ... -``` - -Here, brackets indicate that the contents are optional. - -## Example Specifications - -These could all be references to a package `foo` version `1.2.3` from the -registry at `crates.io` - -| pkgid | name | version | url | -|-------------------------------:|:------:|:---------:|:--------------------:| -| `foo` | foo | * | * | -| `foo:1.2.3` | foo | 1.2.3 | * | -| `crates.io/foo` | foo | * | *://crates.io/foo | -| `crates.io/foo#1.2.3` | foo | 1.2.3 | *://crates.io/foo | -| `crates.io/bar#foo:1.2.3` | foo | 1.2.3 | *://crates.io/bar | -| `http://crates.io/foo#1.2.3` | foo | 1.2.3 | http://crates.io/foo | - -## Brevity of Specifications - -The goal of this is to enable both succinct and exhaustive syntaxes for -referring to packages in a dependency graph. Ambiguous references may refer to -one or more packages. Most commands generate an error if more than one package -could be referred to with the same specification. 
diff --git a/src/doc/src/SUMMARY.md b/src/doc/src/SUMMARY.md new file mode 100644 index 00000000000..4943d251268 --- /dev/null +++ b/src/doc/src/SUMMARY.md @@ -0,0 +1,71 @@ +# Summary + +[Introduction](index.md) + +* [Getting Started](getting-started/index.md) + * [Installation](getting-started/installation.md) + * [First Steps with Cargo](getting-started/first-steps.md) + +* [Cargo Guide](guide/index.md) + * [Why Cargo Exists](guide/why-cargo-exists.md) + * [Creating a New Package](guide/creating-a-new-project.md) + * [Working on an Existing Package](guide/working-on-an-existing-project.md) + * [Dependencies](guide/dependencies.md) + * [Package Layout](guide/project-layout.md) + * [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.md) + * [Tests](guide/tests.md) + * [Continuous Integration](guide/continuous-integration.md) + * [Build Cache](guide/build-cache.md) + +* [Cargo Reference](reference/index.md) + * [Specifying Dependencies](reference/specifying-dependencies.md) + * [The Manifest Format](reference/manifest.md) + * [Configuration](reference/config.md) + * [Environment Variables](reference/environment-variables.md) + * [Build Scripts](reference/build-scripts.md) + * [Publishing on crates.io](reference/publishing.md) + * [Package ID Specifications](reference/pkgid-spec.md) + * [Source Replacement](reference/source-replacement.md) + * [External Tools](reference/external-tools.md) + * [Registries](reference/registries.md) + * [Unstable Features](reference/unstable.md) + +* [Cargo Commands](commands/index.md) + * [Build Commands](commands/build-commands.md) + * [bench](commands/cargo-bench.md) + * [build](commands/cargo-build.md) + * [check](commands/cargo-check.md) + * [clean](commands/cargo-clean.md) + * [doc](commands/cargo-doc.md) + * [fetch](commands/cargo-fetch.md) + * [fix](commands/cargo-fix.md) + * [run](commands/cargo-run.md) + * [rustc](commands/cargo-rustc.md) + * [rustdoc](commands/cargo-rustdoc.md) + * [test](commands/cargo-test.md) + * [Manifest Commands](commands/manifest-commands.md) + * [generate-lockfile](commands/cargo-generate-lockfile.md) + * [locate-project](commands/cargo-locate-project.md) + * [metadata](commands/cargo-metadata.md) + * [pkgid](commands/cargo-pkgid.md) + * [update](commands/cargo-update.md) + * [vendor](commands/cargo-vendor.md) + * [verify-project](commands/cargo-verify-project.md) + * [Package Commands](commands/package-commands.md) + * [init](commands/cargo-init.md) + * [install](commands/cargo-install.md) + * [new](commands/cargo-new.md) + * [search](commands/cargo-search.md) + * [uninstall](commands/cargo-uninstall.md) + * [Publishing Commands](commands/publishing-commands.md) + * [login](commands/cargo-login.md) + * [owner](commands/cargo-owner.md) + * [package](commands/cargo-package.md) + * [publish](commands/cargo-publish.md) + * [yank](commands/cargo-yank.md) + * [General Commands](commands/general-commands.md) + * [help](commands/cargo-help.md) + * [version](commands/cargo-version.md) + +* [FAQ](faq.md) +* [Appendix: Glossary](appendix/glossary.md) diff --git a/src/doc/src/appendix/glossary.md b/src/doc/src/appendix/glossary.md new file mode 100644 index 00000000000..4bd44c54c3c --- /dev/null +++ b/src/doc/src/appendix/glossary.md @@ -0,0 +1,198 @@ +# Glossary + +### Artifact + +An *artifact* is the file or set of files created as a result of the +compilation process. This includes linkable libraries and executable binaries. + +### Crate + +Every target in a package is a *crate*. 
Crates are either libraries or +executable binaries. It may loosely refer to either the source code of the +target, or the compiled artifact that the target produces. A crate may also +refer to a compressed package fetched from a registry. + +### Edition + +A *Rust edition* is a developmental landmark of the Rust language. The +[edition of a package][edition-field] is specified in the `Cargo.toml` +manifest, and individual targets can specify which edition they use. See the +[Edition Guide] for more information. + +### Feature + +The meaning of *feature* depends on the context: + +- A [*feature*][feature] is a named flag which allows for conditional + compilation. A feature can refer to an optional dependency, or an arbitrary + name defined in a `Cargo.toml` manifest that can be checked within source + code. + +- Cargo has [*unstable feature flags*][cargo-unstable] which can be used to + enable experimental behavior of Cargo itself. + +- The Rust compiler and Rustdoc have their own unstable feature flags (see + [The Unstable Book][unstable-book] and [The Rustdoc + Book][rustdoc-unstable]). + +- CPU targets have [*target features*][target-feature] which specify + capabilities of a CPU. + +### Index + +The index is the searchable list of crates in a registry. + +### Lock file + +The `Cargo.lock` *lock file* is a file that captures the exact version of +every dependency used in a workspace or package. It is automatically generated +by Cargo. See [Cargo.toml vs Cargo.lock]. + +### Manifest + +A [*manifest*][manifest] is a description of a package or a workspace in a +file named `Cargo.toml`. + +A [*virtual manifest*][virtual] is a `Cargo.toml` file that only describes a +workspace, and does not include a package. + +### Member + +A *member* is a package that belongs to a workspace. + +### Package + +A *package* is a collection of source files and a `Cargo.toml` manifest which +describes the package. A package has a name and version which is used for +specifying dependencies between packages. A package contains multiple targets, +which are either libraries or executable binaries. + +The *package root* is the directory where the package's `Cargo.toml` manifest +is located. + +The [*package ID specification*][pkgid-spec], or *SPEC*, is a string used to +uniquely reference a specific version of a package from a specific source. + +### Project + +Another name for a [package](#package). + +### Registry + +A *registry* is a service that contains a collection of downloadable crates +that can be installed or used as dependencies for a package. The default +registry is [crates.io](https://crates.io). The registry has an *index* which +contains a list of all crates, and tells Cargo how to download the crates that +are needed. + +### Source + +A *source* is a provider that contains crates that may be included as +dependencies for a package. There are several kinds of sources: + +- **Registry source** — See [registry](#registry). +- **Local registry source** — A set of crates stored as compressed files on + the filesystem. See [Local Registry Sources]. +- **Directory source** — A set of crates stored as uncompressed files on the + filesystem. See [Directory Sources]. +- **Path source** — An individual package located on the filesystem (such as a + [path dependency]) or a set of multiple packages (such as [path overrides]). +- **Git source** — Packages located in a git repository (such as a [git + dependency] or [git source]). + +See [Source Replacement] for more information. 
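+
+As a brief illustration, the kind of source is visible in how a dependency is
+declared in `Cargo.toml`; the names and URLs below are hypothetical:
+
+```toml
+[dependencies]
+# Registry source (crates.io by default).
+serde = "1.0"
+# Git source: a package fetched from a git repository.
+rand = { git = "https://github.com/rust-lang-nursery/rand.git" }
+# Path source: an individual package on the local filesystem.
+hello_utils = { path = "hello_utils" }
+```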
+
+### Spec
+
+See [package ID specification](#package).
+
+### Target
+
+The meaning of the term *target* depends on the context:
+
+- **Cargo Target** — Cargo packages consist of *targets* which correspond to
+  artifacts that will be produced. Packages can have library, binary, example,
+  test, and benchmark targets. The [list of targets][targets] is configured
+  in the `Cargo.toml` manifest, often inferred automatically by the [directory
+  layout] of the source files.
+- **Target Directory** — Cargo places all built artifacts and intermediate
+  files in the *target* directory. By default this is a directory named
+  `target` at the workspace root, or the package root if not using a
+  workspace. The directory may be changed with the `--target-dir` command-line
+  option, the `CARGO_TARGET_DIR` [environment variable], or the
+  `build.target-dir` [config option].
+- **Target Architecture** — The OS and machine architecture for the built
+  artifacts are typically referred to as a *target*.
+- **Target Triple** — A triple is a specific format for specifying a target
+  architecture. Triples may be referred to as a *target triple* which is the
+  architecture for the artifact produced, and the *host triple* which is the
+  architecture that the compiler is running on. The target triple can be
+  specified with the `--target` command-line option or the `build.target`
+  [config option]. The general format of the triple is
+  `<arch><sub>-<vendor>-<sys>-<abi>` where:
+
+  - `arch` = The base CPU architecture, for example `x86_64`, `i686`, `arm`,
+    `thumb`, `mips`, etc.
+  - `sub` = The CPU sub-architecture, for example `arm` has `v7`, `v7s`,
+    `v5te`, etc.
+  - `vendor` = The vendor, for example `unknown`, `apple`, `pc`, `linux`, etc.
+  - `sys` = The system name, for example `linux`, `windows`, etc. `none` is
+    typically used for bare-metal without an OS.
+  - `abi` = The ABI, for example `gnu`, `android`, `eabi`, etc.
+
+  Some parameters may be omitted. Run `rustc --print target-list` for a list of
+  supported targets.
+
+### Test Targets
+
+Cargo *test targets* generate binaries which help verify proper operation and
+correctness of code. There are two types of test artifacts:
+
+* **Unit test** — A *unit test* is an executable binary compiled directly from
+  a library or a binary target. It contains the entire contents of the library
+  or binary code, and runs `#[test]` annotated functions, intended to verify
+  individual units of code.
+* **Integration test target** — An [*integration test
+  target*][integration-tests] is an executable binary compiled from a *test
+  target* which is a distinct crate whose source is located in the `tests`
+  directory or specified by the [`[[test]]` table][targets] in the
+  `Cargo.toml` manifest. It is intended to only test the public API of a
+  library, or execute a binary to verify its operation.
+
+### Workspace
+
+A [*workspace*][workspace] is a collection of one or more packages that share
+common dependency resolution (with a shared `Cargo.lock`), output directory,
+and various settings such as profiles.
+
+A [*virtual workspace*][virtual] is a workspace where the root `Cargo.toml`
+manifest does not define a package, and only lists the workspace members.
+
+The *workspace root* is the directory where the workspace's `Cargo.toml`
+manifest is located.
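+
+As a minimal sketch, a virtual workspace root is a `Cargo.toml` with a
+`[workspace]` table and no `[package]` section; the member names here are
+hypothetical:
+
+```toml
+# Cargo.toml at the workspace root
+[workspace]
+members = ["hello_world", "hello_utils"]
+```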
+ + +[Cargo.toml vs Cargo.lock]: ../guide/cargo-toml-vs-cargo-lock.md +[Directory Sources]: ../reference/source-replacement.md#directory-sources +[Local Registry Sources]: ../reference/source-replacement.md#local-registry-sources +[Source Replacement]: ../reference/source-replacement.md +[cargo-unstable]: ../reference/unstable.md +[config option]: ../reference/config.md +[directory layout]: ../reference/manifest.md#the-project-layout +[edition guide]: ../../edition-guide/index.html +[edition-field]: ../reference/manifest.md#the-edition-field-optional +[environment variable]: ../reference/environment-variables.md +[feature]: ../reference/manifest.md#the-features-section +[git dependency]: ../reference/specifying-dependencies.md#specifying-dependencies-from-git-repositories +[git source]: ../reference/source-replacement.md +[integration-tests]: ../reference/manifest.md#integration-tests +[manifest]: ../reference/manifest.md +[path dependency]: ../reference/specifying-dependencies.md#specifying-path-dependencies +[path overrides]: ../reference/specifying-dependencies.md#overriding-with-local-dependencies +[pkgid-spec]: ../reference/pkgid-spec.md +[rustdoc-unstable]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html +[target-feature]: ../../reference/attributes/codegen.html#the-target_feature-attribute +[targets]: ../reference/manifest.md#configuring-a-target +[unstable-book]: https://doc.rust-lang.org/nightly/unstable-book/index.html +[virtual]: ../reference/manifest.md#virtual-manifest +[workspace]: ../reference/manifest.md#the-workspace-section diff --git a/src/doc/src/commands/build-commands.md b/src/doc/src/commands/build-commands.md new file mode 100644 index 00000000000..f472651811a --- /dev/null +++ b/src/doc/src/commands/build-commands.md @@ -0,0 +1 @@ +# Build Commands diff --git a/src/doc/src/commands/cargo-bench.md b/src/doc/src/commands/cargo-bench.md new file mode 100644 index 00000000000..e840cbf6a2d --- /dev/null +++ b/src/doc/src/commands/cargo-bench.md @@ -0,0 +1,3 @@ +# cargo bench +{{#include command-common.html}} +{{#include ../../man/generated/cargo-bench.html}} diff --git a/src/doc/src/commands/cargo-build.md b/src/doc/src/commands/cargo-build.md new file mode 100644 index 00000000000..dd46f23ed39 --- /dev/null +++ b/src/doc/src/commands/cargo-build.md @@ -0,0 +1,3 @@ +# cargo build +{{#include command-common.html}} +{{#include ../../man/generated/cargo-build.html}} diff --git a/src/doc/src/commands/cargo-check.md b/src/doc/src/commands/cargo-check.md new file mode 100644 index 00000000000..473d00afe2d --- /dev/null +++ b/src/doc/src/commands/cargo-check.md @@ -0,0 +1,3 @@ +# cargo check +{{#include command-common.html}} +{{#include ../../man/generated/cargo-check.html}} diff --git a/src/doc/src/commands/cargo-clean.md b/src/doc/src/commands/cargo-clean.md new file mode 100644 index 00000000000..df321febee8 --- /dev/null +++ b/src/doc/src/commands/cargo-clean.md @@ -0,0 +1,3 @@ +# cargo clean +{{#include command-common.html}} +{{#include ../../man/generated/cargo-clean.html}} diff --git a/src/doc/src/commands/cargo-doc.md b/src/doc/src/commands/cargo-doc.md new file mode 100644 index 00000000000..9cdd897048d --- /dev/null +++ b/src/doc/src/commands/cargo-doc.md @@ -0,0 +1,3 @@ +# cargo doc +{{#include command-common.html}} +{{#include ../../man/generated/cargo-doc.html}} diff --git a/src/doc/src/commands/cargo-fetch.md b/src/doc/src/commands/cargo-fetch.md new file mode 100644 index 00000000000..bb5beda1e78 --- /dev/null +++ 
b/src/doc/src/commands/cargo-fetch.md @@ -0,0 +1,3 @@ +# cargo fetch +{{#include command-common.html}} +{{#include ../../man/generated/cargo-fetch.html}} diff --git a/src/doc/src/commands/cargo-fix.md b/src/doc/src/commands/cargo-fix.md new file mode 100644 index 00000000000..66503337a23 --- /dev/null +++ b/src/doc/src/commands/cargo-fix.md @@ -0,0 +1,3 @@ +# cargo fix +{{#include command-common.html}} +{{#include ../../man/generated/cargo-fix.html}} diff --git a/src/doc/src/commands/cargo-generate-lockfile.md b/src/doc/src/commands/cargo-generate-lockfile.md new file mode 100644 index 00000000000..57b70691138 --- /dev/null +++ b/src/doc/src/commands/cargo-generate-lockfile.md @@ -0,0 +1,3 @@ +# cargo generate-lockfile +{{#include command-common.html}} +{{#include ../../man/generated/cargo-generate-lockfile.html}} diff --git a/src/doc/src/commands/cargo-help.md b/src/doc/src/commands/cargo-help.md new file mode 100644 index 00000000000..7e1ba07307e --- /dev/null +++ b/src/doc/src/commands/cargo-help.md @@ -0,0 +1,3 @@ +# cargo help +{{#include command-common.html}} +{{#include ../../man/generated/cargo-help.html}} diff --git a/src/doc/src/commands/cargo-init.md b/src/doc/src/commands/cargo-init.md new file mode 100644 index 00000000000..044a8b12dca --- /dev/null +++ b/src/doc/src/commands/cargo-init.md @@ -0,0 +1,3 @@ +# cargo init +{{#include command-common.html}} +{{#include ../../man/generated/cargo-init.html}} diff --git a/src/doc/src/commands/cargo-install.md b/src/doc/src/commands/cargo-install.md new file mode 100644 index 00000000000..5fa11a60d57 --- /dev/null +++ b/src/doc/src/commands/cargo-install.md @@ -0,0 +1,3 @@ +# cargo install +{{#include command-common.html}} +{{#include ../../man/generated/cargo-install.html}} diff --git a/src/doc/src/commands/cargo-locate-project.md b/src/doc/src/commands/cargo-locate-project.md new file mode 100644 index 00000000000..0e42adfe404 --- /dev/null +++ b/src/doc/src/commands/cargo-locate-project.md @@ -0,0 +1,3 @@ +# cargo locate-project +{{#include command-common.html}} +{{#include ../../man/generated/cargo-locate-project.html}} diff --git a/src/doc/src/commands/cargo-login.md b/src/doc/src/commands/cargo-login.md new file mode 100644 index 00000000000..5feddeead3e --- /dev/null +++ b/src/doc/src/commands/cargo-login.md @@ -0,0 +1,3 @@ +# cargo login +{{#include command-common.html}} +{{#include ../../man/generated/cargo-login.html}} diff --git a/src/doc/src/commands/cargo-metadata.md b/src/doc/src/commands/cargo-metadata.md new file mode 100644 index 00000000000..273221395e7 --- /dev/null +++ b/src/doc/src/commands/cargo-metadata.md @@ -0,0 +1,3 @@ +# cargo metadata +{{#include command-common.html}} +{{#include ../../man/generated/cargo-metadata.html}} diff --git a/src/doc/src/commands/cargo-new.md b/src/doc/src/commands/cargo-new.md new file mode 100644 index 00000000000..3f1a490244d --- /dev/null +++ b/src/doc/src/commands/cargo-new.md @@ -0,0 +1,3 @@ +# cargo new +{{#include command-common.html}} +{{#include ../../man/generated/cargo-new.html}} diff --git a/src/doc/src/commands/cargo-owner.md b/src/doc/src/commands/cargo-owner.md new file mode 100644 index 00000000000..c1dbc9eb6c7 --- /dev/null +++ b/src/doc/src/commands/cargo-owner.md @@ -0,0 +1,3 @@ +# cargo owner +{{#include command-common.html}} +{{#include ../../man/generated/cargo-owner.html}} diff --git a/src/doc/src/commands/cargo-package.md b/src/doc/src/commands/cargo-package.md new file mode 100644 index 00000000000..8bfedcfa40d --- /dev/null +++ 
b/src/doc/src/commands/cargo-package.md @@ -0,0 +1,3 @@ +# cargo package +{{#include command-common.html}} +{{#include ../../man/generated/cargo-package.html}} diff --git a/src/doc/src/commands/cargo-pkgid.md b/src/doc/src/commands/cargo-pkgid.md new file mode 100644 index 00000000000..7481ad1c7af --- /dev/null +++ b/src/doc/src/commands/cargo-pkgid.md @@ -0,0 +1,3 @@ +# cargo pkgid +{{#include command-common.html}} +{{#include ../../man/generated/cargo-pkgid.html}} diff --git a/src/doc/src/commands/cargo-publish.md b/src/doc/src/commands/cargo-publish.md new file mode 100644 index 00000000000..db45e789908 --- /dev/null +++ b/src/doc/src/commands/cargo-publish.md @@ -0,0 +1,3 @@ +# cargo publish +{{#include command-common.html}} +{{#include ../../man/generated/cargo-publish.html}} diff --git a/src/doc/src/commands/cargo-run.md b/src/doc/src/commands/cargo-run.md new file mode 100644 index 00000000000..16d92815f09 --- /dev/null +++ b/src/doc/src/commands/cargo-run.md @@ -0,0 +1,3 @@ +# cargo run +{{#include command-common.html}} +{{#include ../../man/generated/cargo-run.html}} diff --git a/src/doc/src/commands/cargo-rustc.md b/src/doc/src/commands/cargo-rustc.md new file mode 100644 index 00000000000..ad6b3d98774 --- /dev/null +++ b/src/doc/src/commands/cargo-rustc.md @@ -0,0 +1,3 @@ +# cargo rustc +{{#include command-common.html}} +{{#include ../../man/generated/cargo-rustc.html}} diff --git a/src/doc/src/commands/cargo-rustdoc.md b/src/doc/src/commands/cargo-rustdoc.md new file mode 100644 index 00000000000..a8ebf1950ea --- /dev/null +++ b/src/doc/src/commands/cargo-rustdoc.md @@ -0,0 +1,3 @@ +# cargo rustdoc +{{#include command-common.html}} +{{#include ../../man/generated/cargo-rustdoc.html}} diff --git a/src/doc/src/commands/cargo-search.md b/src/doc/src/commands/cargo-search.md new file mode 100644 index 00000000000..b872d9628fb --- /dev/null +++ b/src/doc/src/commands/cargo-search.md @@ -0,0 +1,3 @@ +# cargo search +{{#include command-common.html}} +{{#include ../../man/generated/cargo-search.html}} diff --git a/src/doc/src/commands/cargo-test.md b/src/doc/src/commands/cargo-test.md new file mode 100644 index 00000000000..52fc969128d --- /dev/null +++ b/src/doc/src/commands/cargo-test.md @@ -0,0 +1,3 @@ +# cargo test +{{#include command-common.html}} +{{#include ../../man/generated/cargo-test.html}} diff --git a/src/doc/src/commands/cargo-uninstall.md b/src/doc/src/commands/cargo-uninstall.md new file mode 100644 index 00000000000..971ad3435df --- /dev/null +++ b/src/doc/src/commands/cargo-uninstall.md @@ -0,0 +1,3 @@ +# cargo uninstall +{{#include command-common.html}} +{{#include ../../man/generated/cargo-uninstall.html}} diff --git a/src/doc/src/commands/cargo-update.md b/src/doc/src/commands/cargo-update.md new file mode 100644 index 00000000000..2be849863f3 --- /dev/null +++ b/src/doc/src/commands/cargo-update.md @@ -0,0 +1,3 @@ +# cargo update +{{#include command-common.html}} +{{#include ../../man/generated/cargo-update.html}} diff --git a/src/doc/src/commands/cargo-vendor.md b/src/doc/src/commands/cargo-vendor.md new file mode 100644 index 00000000000..1e0f333fe6d --- /dev/null +++ b/src/doc/src/commands/cargo-vendor.md @@ -0,0 +1,4 @@ +# cargo vendor +{{#include command-common.html}} +{{#include ../../man/generated/cargo-vendor.html}} + diff --git a/src/doc/src/commands/cargo-verify-project.md b/src/doc/src/commands/cargo-verify-project.md new file mode 100644 index 00000000000..4a4c7668267 --- /dev/null +++ b/src/doc/src/commands/cargo-verify-project.md @@ -0,0 +1,3 @@ +# 
cargo verify-project +{{#include command-common.html}} +{{#include ../../man/generated/cargo-verify-project.html}} diff --git a/src/doc/src/commands/cargo-version.md b/src/doc/src/commands/cargo-version.md new file mode 100644 index 00000000000..12833ede4d5 --- /dev/null +++ b/src/doc/src/commands/cargo-version.md @@ -0,0 +1,3 @@ +# cargo version +{{#include command-common.html}} +{{#include ../../man/generated/cargo-version.html}} diff --git a/src/doc/src/commands/cargo-yank.md b/src/doc/src/commands/cargo-yank.md new file mode 100644 index 00000000000..d6ca7b3c6fd --- /dev/null +++ b/src/doc/src/commands/cargo-yank.md @@ -0,0 +1,3 @@ +# cargo yank +{{#include command-common.html}} +{{#include ../../man/generated/cargo-yank.html}} diff --git a/src/doc/src/commands/command-common.html b/src/doc/src/commands/command-common.html new file mode 100644 index 00000000000..93750600f27 --- /dev/null +++ b/src/doc/src/commands/command-common.html @@ -0,0 +1,18 @@ + diff --git a/src/doc/src/commands/general-commands.md b/src/doc/src/commands/general-commands.md new file mode 100644 index 00000000000..2fb4f7a9778 --- /dev/null +++ b/src/doc/src/commands/general-commands.md @@ -0,0 +1 @@ +# General Commands diff --git a/src/doc/src/commands/index.md b/src/doc/src/commands/index.md new file mode 100644 index 00000000000..a9ef0a2d231 --- /dev/null +++ b/src/doc/src/commands/index.md @@ -0,0 +1,3 @@ +# cargo +{{#include command-common.html}} +{{#include ../../man/generated/cargo.html}} diff --git a/src/doc/src/commands/manifest-commands.md b/src/doc/src/commands/manifest-commands.md new file mode 100644 index 00000000000..cd803bdc379 --- /dev/null +++ b/src/doc/src/commands/manifest-commands.md @@ -0,0 +1 @@ +# Manifest Commands diff --git a/src/doc/src/commands/package-commands.md b/src/doc/src/commands/package-commands.md new file mode 100644 index 00000000000..f528a2571d0 --- /dev/null +++ b/src/doc/src/commands/package-commands.md @@ -0,0 +1 @@ +# Package Commands diff --git a/src/doc/src/commands/publishing-commands.md b/src/doc/src/commands/publishing-commands.md new file mode 100644 index 00000000000..e32bf3c16f9 --- /dev/null +++ b/src/doc/src/commands/publishing-commands.md @@ -0,0 +1 @@ +# Publishing Commands diff --git a/src/doc/faq.md b/src/doc/src/faq.md similarity index 54% rename from src/doc/faq.md rename to src/doc/src/faq.md index fc1d9416ebc..0aa179454e1 100644 --- a/src/doc/faq.md +++ b/src/doc/src/faq.md @@ -1,24 +1,24 @@ -% Frequently Asked Questions - Cargo Documentation +## Frequently Asked Questions -# Is the plan to use Github as a package repository? +### Is the plan to use GitHub as a package repository? -No. The plan for Cargo is to use crates.io, like npm or Rubygems do with +No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with npmjs.org and rubygems.org. We plan to support git repositories as a source of packages forever, because they can be used for early development and temporary patches, even when people use the registry as the primary source of packages. -# Why build crates.io rather than use Github as a registry? +### Why build crates.io rather than use GitHub as a registry? -We think that it's very important to support multiple ways to download -packages, including downloading from Github and copying packages into -your project itself. +We think that it’s very important to support multiple ways to download +packages, including downloading from GitHub and copying packages into +your package itself. 
-That said, we think that crates.io offers a number of important benefits, and +That said, we think that [crates.io] offers a number of important benefits, and will likely become the primary way that people download packages in Cargo. -For precedent, both Node.js's [npm][1] and Ruby's [bundler][2] support both a +For precedent, both Node.js’s [npm][1] and Ruby’s [bundler][2] support both a central registry model as well as a Git-based model, and most packages are downloaded through the registry in those ecosystems, with an important minority of packages making use of git-based packages. @@ -42,23 +42,23 @@ languages include: down fast. Also remember that not everybody has a high-speed, low-latency Internet connection. -# Will Cargo work with C code (or other languages)? +### Will Cargo work with C code (or other languages)? Yes! -Cargo handles compiling Rust code, but we know that many Rust projects +Cargo handles compiling Rust code, but we know that many Rust packages link against C code. We also know that there are decades of tooling built up around compiling languages other than Rust. -Our solution: Cargo allows a package to [specify a script](build-script.html) +Our solution: Cargo allows a package to [specify a script](reference/build-scripts.md) (written in Rust) to run before invoking `rustc`. Rust is leveraged to implement platform-specific configuration and refactor out common build functionality among packages. -# Can Cargo be used inside of `make` (or `ninja`, or ...) +### Can Cargo be used inside of `make` (or `ninja`, or ...) Indeed. While we intend Cargo to be useful as a standalone way to -compile Rust projects at the top-level, we know that some people will +compile Rust packages at the top-level, we know that some people will want to invoke Cargo from other build tools. We have designed Cargo to work well in those contexts, paying attention @@ -67,23 +67,23 @@ have some work to do on those fronts, but using Cargo in the context of conventional scripts is something we designed for from the beginning and will continue to prioritize. -# Does Cargo handle multi-platform projects or cross-compilation? +### Does Cargo handle multi-platform packages or cross-compilation? Rust itself provides facilities for configuring sections of code based on the platform. Cargo also supports [platform-specific dependencies][target-deps], and we plan to support more per-platform configuration in `Cargo.toml` in the future. -[target-deps]: manifest.html#the-[dependencies.*]-sections +[target-deps]: reference/specifying-dependencies.md#platform-specific-dependencies -In the longer-term, we're looking at ways to conveniently cross-compile -projects using Cargo. +In the longer-term, we’re looking at ways to conveniently cross-compile +packages using Cargo. -# Does Cargo support environments, like `production` or `test`? +### Does Cargo support environments, like `production` or `test`? We support environments through the use of [profiles][profile] to support: -[profile]: manifest.html#the-[profile.*]-sections +[profile]: reference/manifest.md#the-profile-sections * environment-specific flags (like `-g --opt-level=0` for development and `--opt-level=3` for production). @@ -91,7 +91,7 @@ We support environments through the use of [profiles][profile] to support: * environment-specific `#[cfg]` * a `cargo test` command -# Does Cargo work on Windows? +### Does Cargo work on Windows? Yes! @@ -101,41 +101,44 @@ issue][3]. 
[3]: https://github.com/rust-lang/cargo/issues -# Why do binaries have `Cargo.lock` in version control, but not libraries? +### Why do binaries have `Cargo.lock` in version control, but not libraries? The purpose of a `Cargo.lock` is to describe the state of the world at the time of a successful build. It is then used to provide deterministic builds across -whatever machine is building the project by ensuring that the exact same +whatever machine is building the package by ensuring that the exact same dependencies are being compiled. -This property is most desirable from applications and projects which are at the +This property is most desirable from applications and packages which are at the very end of the dependency chain (binaries). As a result, it is recommended that all binaries check in their `Cargo.lock`. For libraries the situation is somewhat different. A library is not only used by the library developers, but also any downstream consumers of the library. Users -dependent on the library will not inspect the library's `Cargo.lock` (even if it +dependent on the library will not inspect the library’s `Cargo.lock` (even if it exists). This is precisely because a library should **not** be deterministically recompiled for all users of the library. -If a library ends up being used transitively by several dependencies, it's +If a library ends up being used transitively by several dependencies, it’s likely that just a single copy of the library is desired (based on semver -compatibility). If all libraries were to check in their `Cargo.lock`, then -multiple copies of the library would be used, and perhaps even a version +compatibility). If Cargo used all of the dependencies' `Cargo.lock` files, +then multiple copies of the library could be used, and perhaps even a version conflict. In other words, libraries specify semver requirements for their dependencies but cannot see the full picture. Only end products like binaries have a full picture to decide what versions of dependencies should be used. -# Can libraries use `*` as a version for their dependencies? +### Can libraries use `*` as a version for their dependencies? -While they _can_, strictly speaking, they should not. A version requirement +**As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries) +with wildcard dependency constraints.** + +While libraries _can_, strictly speaking, they should not. A version requirement of `*` says “This will work with every version ever,” which is never going -to be true. Libraries should always specifiy the range that they do work with, +to be true. Libraries should always specify the range that they do work with, even if it’s something as general as “every 1.x.y version.” -# Why `Cargo.toml`? +### Why `Cargo.toml`? As one of the most frequent interactions with Cargo, the question of why the configuration file is named `Cargo.toml` arises from time to time. The leading @@ -150,3 +153,41 @@ Cargo does not allow other names such as `cargo.toml` or `Cargofile` to emphasize the ease of how a Cargo repository can be identified. An option of many possible names has historically led to confusion where one case was handled but others were accidentally forgotten. + +[crates.io]: https://crates.io/ + +### How can Cargo work offline? + +Cargo is often used in situations with limited or no network access such as +airplanes, CI environments, or embedded in large production deployments. 
Users
+are often surprised when Cargo attempts to fetch resources from the network, and
+hence the request for Cargo to work offline comes up frequently.
+
+Cargo, at its heart, will not attempt to access the network unless told to do
+so. That is, if no crates come from crates.io, a git repository, or some other
+network location, Cargo will never attempt to make a network connection. As a
+result, if Cargo attempts to touch the network, then it's because it needs to
+fetch a required resource.
+
+Cargo is also quite aggressive about caching information to minimize the amount
+of network activity. It will guarantee, for example, that if `cargo build` (or
+an equivalent) is run to completion, then the next `cargo build` will
+not touch the network so long as `Cargo.toml` has not been modified in the
+meantime. This avoidance of the network boils down to a `Cargo.lock` existing
+and a populated cache of the crates reflected in the lock file. If either of
+these components is missing, it must be fetched remotely for the build to
+succeed.
+
+As of Rust 1.11.0, Cargo understands a new flag, `--frozen`, which is an
+assertion that it shouldn't touch the network. When passed, Cargo will
+immediately return an error if it would otherwise attempt a network request.
+The error should include contextual information about why the network request is
+being made in the first place, to help with debugging. Note that this flag *does
+not change the behavior of Cargo*; it simply asserts that Cargo shouldn't touch
+the network, as a previous command has been run to ensure that network activity
+shouldn't be necessary.
+
+For more information about vendoring, see documentation on [source
+replacement][replace].
+
+[replace]: reference/source-replacement.md
diff --git a/src/doc/src/getting-started/first-steps.md b/src/doc/src/getting-started/first-steps.md
new file mode 100644
index 00000000000..9c0a46b4071
--- /dev/null
+++ b/src/doc/src/getting-started/first-steps.md
@@ -0,0 +1,73 @@
+## First Steps with Cargo
+
+To start a new package with Cargo, use `cargo new`:
+
+```console
+$ cargo new hello_world
+```
+
+Cargo defaults to `--bin` to make a binary program. To make a library, we'd
+pass `--lib`.
+
+Let’s check out what Cargo has generated for us:
+
+```console
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+    └── main.rs
+
+1 directory, 2 files
+```
+
+This is all we need to get started. First, let’s check out `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+edition = "2018"
+
+[dependencies]
+```
+
+This is called a **manifest**, and it contains all of the metadata that Cargo
+needs to compile your package.
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+    println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” for us. Let’s compile it:
+
+```console
+$ cargo build
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+And then run it:
+
+```console
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step:
+
+```console
+$ cargo run
+     Fresh hello_world v0.1.0 (file:///path/to/package/hello_world)
+   Running `target/debug/hello_world`
+Hello, world!
+```
+
+### Going further
+
+For more details on using Cargo, check out the [Cargo Guide](../guide/index.md).
diff --git a/src/doc/src/getting-started/index.md b/src/doc/src/getting-started/index.md
new file mode 100644
index 00000000000..ed775db70b6
--- /dev/null
+++ b/src/doc/src/getting-started/index.md
@@ -0,0 +1,6 @@
+## Getting Started
+
+To get started with Cargo, install Cargo (and Rust) and set up your first crate.
+
+* [Installation](installation.md)
+* [First steps with Cargo](first-steps.md)
diff --git a/src/doc/src/getting-started/installation.md b/src/doc/src/getting-started/installation.md
new file mode 100644
index 00000000000..d7e55db1975
--- /dev/null
+++ b/src/doc/src/getting-started/installation.md
@@ -0,0 +1,37 @@
+## Installation
+
+### Install Rust and Cargo
+
+The easiest way to get Cargo is to install the current stable release of [Rust]
+by using `rustup`. Installing Rust using `rustup` will also install `cargo`.
+
+On Linux and macOS systems, this is done as follows:
+
+```console
+$ curl https://sh.rustup.rs -sSf | sh
+```
+
+It will download a script and start the installation. If everything goes well,
+you’ll see this appear:
+
+```console
+Rust is installed now. Great!
+```
+
+On Windows, download and run [rustup-init.exe]. It will start the installation
+in a console and present the above message on success.
+
+After this, you can use the `rustup` command to also install the `beta` or `nightly`
+channels for Rust and Cargo.
+
+For other installation options and information, visit the
+[install][install-rust] page of the Rust website.
+
+### Build and Install Cargo from Source
+
+Alternatively, you can [build Cargo from source][compiling-from-source].
+
+[rust]: https://www.rust-lang.org/
+[rustup-init.exe]: https://win.rustup.rs/
+[install-rust]: https://www.rust-lang.org/tools/install
+[compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source
diff --git a/src/doc/src/guide/build-cache.md b/src/doc/src/guide/build-cache.md
new file mode 100644
index 00000000000..d253b8acc4d
--- /dev/null
+++ b/src/doc/src/guide/build-cache.md
@@ -0,0 +1,14 @@
+## Build cache
+
+Cargo shares build artifacts among all the packages of a single workspace.
+Today, Cargo does not share build results across different workspaces, but
+a similar result can be achieved by using a third-party tool, [sccache].
+
+To set up `sccache`, install it with `cargo install sccache` and set the
+`RUSTC_WRAPPER` environment variable to `sccache` before invoking Cargo.
+If you use bash, it makes sense to add `export RUSTC_WRAPPER=sccache` to
+`.bashrc`. Refer to the sccache documentation for more details.
+
+[sccache]: https://github.com/mozilla/sccache
+
diff --git a/src/doc/src/guide/cargo-toml-vs-cargo-lock.md b/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
new file mode 100644
index 00000000000..12711247830
--- /dev/null
+++ b/src/doc/src/guide/cargo-toml-vs-cargo-lock.md
@@ -0,0 +1,103 @@
+## Cargo.toml vs Cargo.lock
+
+`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk
+about them, here’s a summary:
+
+* `Cargo.toml` is about describing your dependencies in a broad sense, and is
+  written by you.
+* `Cargo.lock` contains exact information about your dependencies. It is
+  maintained by Cargo and should not be manually edited.
+
+If you’re building a non-end product, such as a Rust library that other Rust
+packages will depend on, put `Cargo.lock` in your `.gitignore`.
+If you’re building an end product, such as an executable (a command-line tool
+or an application) or a system library with a crate type of `staticlib` or `cdylib`,
+check `Cargo.lock` into `git`. If you're curious about why that is, see
+["Why do binaries have `Cargo.lock` in version control, but not libraries?" in the
+FAQ](../faq.md#why-do-binaries-have-cargolock-in-version-control-but-not-libraries).
+
+Let’s dig in a little bit more.
+
+`Cargo.toml` is a **manifest** file in which we can specify a bunch of
+different metadata about our package. For example, we can say that we depend
+on another package:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand.git" }
+```
+
+This package has a single dependency, on the `rand` library. We’ve stated in
+this case that we’re relying on a particular Git repository that lives on
+GitHub. Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the `master` branch to build our package.
+
+Sound good? Well, there’s one problem: If you build this package today, and
+then you send a copy to me, and I build this package tomorrow, something bad
+could happen. There could be more commits to `rand` in the meantime, and my
+build would include new commits while yours would not. Therefore, we would
+get different builds. This would be bad because we want reproducible builds.
+
+We could fix this problem by putting a `rev` line in our `Cargo.toml`:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand.git", rev = "9f35b8e" }
+```
+
+Now our builds will be the same. But there’s a big drawback: now we have to
+manually think about SHA-1s every time we want to update our library. This is
+both tedious and error prone.
+
+Enter the `Cargo.lock`. Because of its existence, we don’t need to manually
+keep track of the exact revisions: Cargo will do it for us. When we have a
+manifest like this:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand.git" }
+```
+
+Cargo will take the latest commit and write that information out into our
+`Cargo.lock` when we build for the first time. That file will look like this:
+
+```toml
+[[package]]
+name = "hello_world"
+version = "0.1.0"
+dependencies = [
+ "rand 0.1.0 (git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9)",
+]
+
+[[package]]
+name = "rand"
+version = "0.1.0"
+source = "git+https://github.com/rust-lang-nursery/rand.git#9f35b8e439eeedd60b9414c58f389bdc6a3284f9"
+```
+
+You can see that there’s a lot more information here, including the exact
+revision we used to build. Now when you give your package to someone else,
+they’ll use the exact same SHA, even though we didn’t specify it in our
+`Cargo.toml`.
+
+When we’re ready to opt in to a new version of the library, Cargo can
+re-calculate the dependencies and update things for us:
+
+```console
+$ cargo update           # updates all dependencies
+$ cargo update -p rand   # updates just “rand”
+```
+
+This will write out a new `Cargo.lock` with the new version information. Note
+that the argument to `cargo update` is actually a
+[Package ID Specification](../reference/pkgid-spec.md) and `rand` is just a short
+specification.
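+
+For example, if the dependency graph ever contained more than one package named
+`rand`, a fuller spec of the form `name:version` would disambiguate them (a
+hypothetical invocation, assuming version `0.1.0` is in the graph):
+
+```console
+$ cargo update -p rand:0.1.0
+```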
diff --git a/src/doc/src/guide/continuous-integration.md b/src/doc/src/guide/continuous-integration.md
new file mode 100644
index 00000000000..25d2e385154
--- /dev/null
+++ b/src/doc/src/guide/continuous-integration.md
@@ -0,0 +1,88 @@
+## Continuous Integration
+
+### Travis CI
+
+To test your package on Travis CI, here is a sample `.travis.yml` file:
+
+```yaml
+language: rust
+rust:
+  - stable
+  - beta
+  - nightly
+matrix:
+  allow_failures:
+    - rust: nightly
+```
+
+This will test all three release channels, but any breakage in nightly
+will not fail your overall build. Please see the [Travis CI Rust
+documentation](https://docs.travis-ci.com/user/languages/rust/) for more
+information.
+
+### GitLab CI
+
+To test your package on GitLab CI, here is a sample `.gitlab-ci.yml` file:
+
+```yaml
+stages:
+  - build
+
+rust-latest:
+  stage: build
+  image: rust:latest
+  script:
+    - cargo build --verbose
+    - cargo test --verbose
+
+rust-nightly:
+  stage: build
+  image: rustlang/rust:nightly
+  script:
+    - cargo build --verbose
+    - cargo test --verbose
+  allow_failure: true
+```
+
+This will test on the stable channel and nightly channel, but any
+breakage in nightly will not fail your overall build. Please see the
+[GitLab CI documentation](https://docs.gitlab.com/ce/ci/yaml/README.html) for more
+information.
+
+### builds.sr.ht
+
+To test your package on sr.ht, here is a sample `.build.yml` file.
+Be sure to change `<your repo>` and `<your project>` to the repo to clone and
+the directory where it was cloned.
+
+```yaml
+image: archlinux
+packages:
+  - rustup
+sources:
+  - <your repo>
+tasks:
+  - setup: |
+      rustup toolchain install nightly stable
+      cd <your project>/
+      rustup run stable cargo fetch
+  - stable: |
+      rustup default stable
+      cd <your project>/
+      cargo build --verbose
+      cargo test --verbose
+  - nightly: |
+      rustup default nightly
+      cd <your project>/
+      cargo build --verbose ||:
+      cargo test --verbose ||:
+  - docs: |
+      cd <your project>/
+      rustup run stable cargo doc --no-deps
+      rustup run nightly cargo doc --no-deps ||:
+```
+
+This will test and build documentation on the stable channel and nightly
+channel, but any breakage in nightly will not fail your overall build. Please
+see the [builds.sr.ht documentation](https://man.sr.ht/builds.sr.ht/) for more
+information.
diff --git a/src/doc/src/guide/creating-a-new-project.md b/src/doc/src/guide/creating-a-new-project.md
new file mode 100644
index 00000000000..92a97a2d6e9
--- /dev/null
+++ b/src/doc/src/guide/creating-a-new-project.md
@@ -0,0 +1,91 @@
+## Creating a New Package
+
+To start a new package with Cargo, use `cargo new`:
+
+```console
+$ cargo new hello_world --bin
+```
+
+We’re passing `--bin` because we’re making a binary program: if we
+were making a library, we’d pass `--lib`. This also initializes a new `git`
+repository by default. If you don't want it to do that, pass `--vcs none`.
+
+Let’s check out what Cargo has generated for us:
+
+```console
+$ cd hello_world
+$ tree .
+.
+├── Cargo.toml
+└── src
+    └── main.rs
+
+1 directory, 2 files
+```
+
+Let’s take a closer look at `Cargo.toml`:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+edition = "2018"
+
+[dependencies]
+
+```
+
+This is called a **manifest**, and it contains all of the metadata that Cargo
+needs to compile your package.
+
+Here’s what’s in `src/main.rs`:
+
+```rust
+fn main() {
+    println!("Hello, world!");
+}
+```
+
+Cargo generated a “hello world” for us. Let’s compile it:
+
+```console
+$ cargo build
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+And then run it:
+
+```console
+$ ./target/debug/hello_world
+Hello, world!
+```
+
+We can also use `cargo run` to compile and then run it, all in one step (you
+won't see the `Compiling` line if you have not made any changes since you last
+compiled):
+
+```console
+$ cargo run
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+     Running `target/debug/hello_world`
+Hello, world!
+```
+
+You’ll now notice a new file, `Cargo.lock`. It contains information about our
+dependencies. Since we don’t have any yet, it’s not very interesting.
+
+Once you’re ready for release, you can use `cargo build --release` to compile
+your files with optimizations turned on:
+
+```console
+$ cargo build --release
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+`cargo build --release` puts the resulting binary in `target/release` instead of
+`target/debug`.
+
+Compiling in debug mode is the default for development: compilation time is
+shorter since the compiler doesn't do optimizations, but the code will run
+slower. Release mode takes longer to compile, but the code will run faster.
diff --git a/src/doc/src/guide/dependencies.md b/src/doc/src/guide/dependencies.md
new file mode 100644
index 00000000000..a6d129a359d
--- /dev/null
+++ b/src/doc/src/guide/dependencies.md
@@ -0,0 +1,89 @@
+## Dependencies
+
+[crates.io] is the Rust community's central package registry that serves as a
+location to discover and download packages. `cargo` is configured to use it by
+default to find requested packages.
+
+To depend on a library hosted on [crates.io], add it to your `Cargo.toml`.
+
+[crates.io]: https://crates.io/
+
+### Adding a dependency
+
+If your `Cargo.toml` doesn't already have a `[dependencies]` section, add that,
+then list the crate name and version that you would like to use. This example
+adds a dependency on the `time` crate:
+
+```toml
+[dependencies]
+time = "0.1.12"
+```
+
+The version string is a [semver] version requirement. The [specifying
+dependencies](../reference/specifying-dependencies.md) docs have more information about
+the options you have here.
+
+[semver]: https://github.com/steveklabnik/semver#requirements
+
+If we also wanted to add a dependency on the `regex` crate, we would not need
+a separate `[dependencies]` section for each crate listed. Here's what your whole
+`Cargo.toml` file would look like with dependencies on the `time` and `regex`
+crates:
+
+```toml
+[package]
+name = "hello_world"
+version = "0.1.0"
+authors = ["Your Name <you@example.com>"]
+edition = "2018"
+
+[dependencies]
+time = "0.1.12"
+regex = "0.1.41"
+```
+
+Re-run `cargo build`, and Cargo will fetch the new dependencies and all of
+their dependencies, compile them all, and update the `Cargo.lock`:
+
+```console
+$ cargo build
+    Updating crates.io index
+ Downloading memchr v0.1.5
+ Downloading libc v0.1.10
+ Downloading regex-syntax v0.2.1
+ Downloading memchr v0.1.5
+ Downloading aho-corasick v0.3.0
+ Downloading regex v0.1.41
+   Compiling memchr v0.1.5
+   Compiling libc v0.1.10
+   Compiling regex-syntax v0.2.1
+   Compiling memchr v0.1.5
+   Compiling aho-corasick v0.3.0
+   Compiling regex v0.1.41
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+```
+
+Our `Cargo.lock` contains the exact information about which revision of all of
+these dependencies we used.
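+
+For instance, a registry dependency like `regex` above is recorded with an
+entry along these lines (a sketch; the exact metadata varies between Cargo
+versions):
+
+```toml
+[[package]]
+name = "regex"
+version = "0.1.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+```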
+
+Now, if `regex` gets updated, we will still build with the same revision until
+we choose to `cargo update`.
+
+You can now use the `regex` library in `main.rs`.
+
+```rust
+use regex::Regex;
+
+fn main() {
+    let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap();
+    println!("Did our date match? {}", re.is_match("2014-01-01"));
+}
+```
+
+Running it will show:
+
+```console
+$ cargo run
+     Running `target/debug/hello_world`
+Did our date match? true
+```
diff --git a/src/doc/src/guide/index.md b/src/doc/src/guide/index.md
new file mode 100644
index 00000000000..f40e25a7beb
--- /dev/null
+++ b/src/doc/src/guide/index.md
@@ -0,0 +1,14 @@
+## Cargo Guide
+
+This guide will give you all that you need to know about how to use Cargo to
+develop Rust packages.
+
+* [Why Cargo Exists](why-cargo-exists.md)
+* [Creating a New Package](creating-a-new-project.md)
+* [Working on an Existing Cargo Package](working-on-an-existing-project.md)
+* [Dependencies](dependencies.md)
+* [Package Layout](project-layout.md)
+* [Cargo.toml vs Cargo.lock](cargo-toml-vs-cargo-lock.md)
+* [Tests](tests.md)
+* [Continuous Integration](continuous-integration.md)
+* [Build Cache](build-cache.md)
diff --git a/src/doc/src/guide/project-layout.md b/src/doc/src/guide/project-layout.md
new file mode 100644
index 00000000000..6c9f4a7719c
--- /dev/null
+++ b/src/doc/src/guide/project-layout.md
@@ -0,0 +1,35 @@
+## Package Layout
+
+Cargo uses conventions for file placement to make it easy to dive into a new
+Cargo package:
+
+```
+.
+├── Cargo.lock
+├── Cargo.toml
+├── benches
+│   └── large-input.rs
+├── examples
+│   └── simple.rs
+├── src
+│   ├── bin
+│   │   └── another_executable.rs
+│   ├── lib.rs
+│   └── main.rs
+└── tests
+    └── some-integration-tests.rs
+```
+
+* `Cargo.toml` and `Cargo.lock` are stored in the root of your package (*package
+  root*).
+* Source code goes in the `src` directory.
+* The default library file is `src/lib.rs`.
+* The default executable file is `src/main.rs`.
+* Other executables can be placed in `src/bin/*.rs`.
+* Integration tests go in the `tests` directory (unit tests go in each file
+  they're testing).
+* Examples go in the `examples` directory.
+* Benchmarks go in the `benches` directory.
+
+These are explained in more detail in the [manifest
+description](../reference/manifest.md#the-project-layout).
diff --git a/src/doc/src/guide/tests.md b/src/doc/src/guide/tests.md
new file mode 100644
index 00000000000..cebba75c2b3
--- /dev/null
+++ b/src/doc/src/guide/tests.md
@@ -0,0 +1,39 @@
+## Tests
+
+Cargo can run your tests with the `cargo test` command. Cargo looks for tests
+to run in two places: in each of your `src` files and any tests in `tests/`.
+Tests in your `src` files should be unit tests, and tests in `tests/` should be
+integration-style tests. As such, you’ll need to import your crates into
+the files in `tests`.
+
+Here's an example of running `cargo test` in our package, which currently has
+no tests:
+
+```console
+$ cargo test
+   Compiling rand v0.1.0 (https://github.com/rust-lang-nursery/rand.git#9f35b8e)
+   Compiling hello_world v0.1.0 (file:///path/to/package/hello_world)
+     Running target/test/hello_world-9c2b65bbb79eabce
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out
+```
+
+If our package had tests, we would see more output with the correct number of
+tests.
+
+You can also run a specific test by passing a filter:
+
+```console
+$ cargo test foo
+```
+
+This will run any test with `foo` in its name.
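+
+As a small sketch of an integration test, assuming our library crate is named
+`hello_world` and exposes a public `hello()` function (a hypothetical API), a
+file in `tests/` imports the crate like any external dependency:
+
+```rust
+// tests/smoke.rs: integration tests are compiled as their own crate,
+// so the library under test must be imported explicitly.
+use hello_world::hello;
+
+#[test]
+fn says_hello() {
+    // `hello()` is assumed here for illustration.
+    assert_eq!(hello(), "Hello, world!");
+}
+```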
+ +`cargo test` runs additional checks as well. For example, it will compile any +examples you’ve included and will also test the examples in your +documentation. Please see the [testing guide][testing] in the Rust +documentation for more details. + +[testing]: ../../book/ch11-00-testing.html diff --git a/src/doc/src/guide/why-cargo-exists.md b/src/doc/src/guide/why-cargo-exists.md new file mode 100644 index 00000000000..215bbceeb4b --- /dev/null +++ b/src/doc/src/guide/why-cargo-exists.md @@ -0,0 +1,12 @@ +## Why Cargo Exists + +Cargo is a tool that allows Rust packages to declare their various +dependencies and ensure that you’ll always get a repeatable build. + +To accomplish this goal, Cargo does four things: + +* Introduces two metadata files with various bits of package information. +* Fetches and builds your package’s dependencies. +* Invokes `rustc` or another build tool with the correct parameters to build + your package. +* Introduces conventions to make working with Rust packages easier. diff --git a/src/doc/src/guide/working-on-an-existing-project.md b/src/doc/src/guide/working-on-an-existing-project.md new file mode 100644 index 00000000000..ff5a31f2dd7 --- /dev/null +++ b/src/doc/src/guide/working-on-an-existing-project.md @@ -0,0 +1,22 @@ +## Working on an Existing Cargo Package + +If you download an existing package that uses Cargo, it’s really easy +to get going. + +First, get the package from somewhere. In this example, we’ll use `rand` +cloned from its repository on GitHub: + +```console +$ git clone https://github.com/rust-lang-nursery/rand.git +$ cd rand +``` + +To build, use `cargo build`: + +```console +$ cargo build + Compiling rand v0.1.0 (file:///path/to/package/rand) +``` + +This will fetch all of the dependencies and then build them, along with the +package. diff --git a/src/doc/src/images/Cargo-Logo-Small.png b/src/doc/src/images/Cargo-Logo-Small.png new file mode 100644 index 00000000000..e3a99208c28 Binary files /dev/null and b/src/doc/src/images/Cargo-Logo-Small.png differ diff --git a/src/doc/src/images/auth-level-acl.png b/src/doc/src/images/auth-level-acl.png new file mode 100644 index 00000000000..e7bc25180dc Binary files /dev/null and b/src/doc/src/images/auth-level-acl.png differ diff --git a/src/doc/src/images/org-level-acl.png b/src/doc/src/images/org-level-acl.png new file mode 100644 index 00000000000..ed5aa882a3b Binary files /dev/null and b/src/doc/src/images/org-level-acl.png differ diff --git a/src/doc/src/index.md b/src/doc/src/index.md new file mode 100644 index 00000000000..66cb784f49c --- /dev/null +++ b/src/doc/src/index.md @@ -0,0 +1,30 @@ +# The Cargo Book + +![Cargo Logo](images/Cargo-Logo-Small.png) + +Cargo is the [Rust] *package manager*. Cargo downloads your Rust package’s +dependencies, compiles your packages, makes distributable packages, and uploads them to +[crates.io], the Rust community’s *package registry*. You can contribute +to this book on [GitHub]. + + +### Sections + +**[Getting Started](getting-started/index.md)** + +To get started with Cargo, install Cargo (and Rust) and set up your first crate. + +**[Cargo Guide](guide/index.md)** + +The guide will give you all you need to know about how to use Cargo to develop +Rust packages. + +**[Cargo Reference](reference/index.md)** + +The reference covers the details of various areas of Cargo. 
+
+**[Frequently Asked Questions](faq.md)**
+
+[rust]: https://www.rust-lang.org/
+[crates.io]: https://crates.io/
+[GitHub]: https://github.com/rust-lang/cargo/tree/master/src/doc/src
diff --git a/src/doc/build-script.md b/src/doc/src/reference/build-scripts.md
similarity index 55%
rename from src/doc/build-script.md
rename to src/doc/src/reference/build-scripts.md
index c4b7ec87592..9d1c9f4062a 100644
--- a/src/doc/build-script.md
+++ b/src/doc/src/reference/build-scripts.md
@@ -1,4 +1,4 @@
-% Build Script Support - Cargo Documentation
+## Build Scripts
 
 Some packages need to compile third-party non-Rust code, for example C
 libraries. Other packages need to link to C libraries which can either be
@@ -18,9 +18,10 @@ build = "build.rs"
 
 The Rust file designated by the `build` command (relative to the package root)
 will be compiled and invoked before anything else is compiled in the package,
-allowing your Rust code to depend on the built or generated artifacts. Note
-that there is no default value for `build`, it must be explicitly specified if
-required.
+allowing your Rust code to depend on the built or generated artifacts.
+By default Cargo looks for a `"build.rs"` file in a package root (even if you
+do not specify a value for `build`). Use `build = "custom_build_name.rs"` to specify
+a custom build script name or `build = false` to disable automatic detection of the build script.
 
 Some example use cases of the build command are:
 
@@ -32,64 +33,122 @@ Some example use cases of the build command are:
 Each of these use cases will be detailed in full below to give examples of how
 the build command works.
 
-## Inputs to the Build Script
+### Inputs to the Build Script
 
 When the build script is run, there are a number of inputs to the build script,
 all passed in the form of [environment variables][env].
 
-In addition to environment variables, the build script's current directory is
-the source directory of the build script's package.
+In addition to environment variables, the build script’s current directory is
+the source directory of the build script’s package.
 
-[env]: environment-variables.html
+[env]: environment-variables.md
 
-## Outputs of the Build Script
+### Outputs of the Build Script
 
-All the lines printed to stdout by a build script that start with `cargo:`
-are interpreted by Cargo and must be of the form `key=value`.
+All the lines printed to stdout by a build script are written to a file like
+`target/debug/build/<pkg>/output` (the precise location may depend on your
+configuration). If you would like to see such output directly in your terminal,
+invoke cargo as 'very verbose' with the `-vv` flag. Note that if neither the
+build script nor package source files are modified, subsequent calls to
+cargo with `-vv` will **not** print output to the terminal because a
+new build is not executed. Run `cargo clean` before each cargo invocation
+if you want to ensure that output is always displayed on your terminal.
+Any line that starts with `cargo:` is interpreted directly by Cargo.
+This line must be of the form `cargo:key=value`, like the examples below:
 
-Example output:
-
-```notrust
+```
+# specially recognized by Cargo
 cargo:rustc-link-lib=static=foo
 cargo:rustc-link-search=native=/path/to/foo
 cargo:rustc-cfg=foo
+cargo:rustc-env=FOO=bar
+cargo:rustc-cdylib-link-arg=-Wl,-soname,libfoo.so.1.2.3
+# arbitrary user-defined metadata
 cargo:root=/path/to/foo
 cargo:libdir=/path/to/foo/lib
 cargo:include=/path/to/foo/include
 ```
 
-There are a few special keys that Cargo recognizes, affecting how the crate this
-build script is for is built:
-
-* `rustc-link-lib` indicates that the specified value should be passed to the
-  compiler as a `-l` flag.
-* `rustc-link-search` indicates the specified value should be passed to the
-  compiler as a `-L` flag.
-* `rustc-cfg` indicates that the specified directive will be passed as a `--cfg`
-  flag to the compiler. This is often useful for performing compile-time
+On the other hand, lines printed to stderr are written to a file like
+`target/debug/build/<pkg>/stderr` but are not interpreted by cargo.
+
+There are a few special keys that Cargo recognizes, some affecting how the
+crate is built:
+
+* `rustc-link-lib=[KIND=]NAME` indicates that the specified value is a library
+  name and should be passed to the compiler as a `-l` flag. The optional `KIND`
+  can be one of `static`, `dylib` (the default), or `framework`, see
+  `rustc --help` for more details.
+* `rustc-link-search=[KIND=]PATH` indicates the specified value is a library
+  search path and should be passed to the compiler as a `-L` flag. The optional
+  `KIND` can be one of `dependency`, `crate`, `native`, `framework` or `all`
+  (the default), see `rustc --help` for more details.
+* `rustc-flags=FLAGS` is a set of flags passed to the compiler, only `-l` and
+  `-L` flags are supported.
+* `rustc-cfg=FEATURE` indicates that the specified feature will be passed as a
+  `--cfg` flag to the compiler. This is often useful for performing compile-time
   detection of various features.
+* `rustc-env=VAR=VALUE` indicates that the specified environment variable
+  will be added to the environment which the compiler is run within.
+  The value can then be retrieved by the `env!` macro in the compiled crate.
+  This is useful for embedding additional metadata in the crate's code,
+  such as the hash of Git HEAD or the unique identifier of a continuous
+  integration server.
+* `rustc-cdylib-link-arg=FLAG` is a flag passed to the compiler as
+  `-C link-arg=FLAG` when building a `cdylib`. Its usage is highly platform
+  specific. It is useful to set the shared library version or the runtime path.
+* `rerun-if-changed=PATH` is a path to a file or directory which indicates that
+  the build script should be re-run if it changes (detected by a more-recent
+  last-modified timestamp on the file). Normally build scripts are re-run if
+  any file inside the crate root changes, but this can be used to scope changes
+  to just a small set of files. (If this path points to a directory the entire
+  directory will not be traversed for changes -- only changes to the timestamp
+  of the directory itself (which corresponds to some types of changes within the
+  directory, depending on platform) will trigger a rebuild. To request a re-run
+  on any changes within an entire directory, print a line for the directory and
+  another line for everything inside it, recursively.)
+  Note that if the build script itself (or one of its dependencies) changes,
+  then it's rebuilt and rerun unconditionally, so
+  `cargo:rerun-if-changed=build.rs` is almost always redundant (unless you
+  want to ignore changes in all other files except for `build.rs`).
+* `rerun-if-env-changed=VAR` is the name of an environment variable which
+  indicates that if the environment variable's value changes the build script
+  should be rerun. This basically behaves the same as `rerun-if-changed` except
+  that it works with environment variables instead. Note that the environment
+  variables here are intended for global environment variables like `CC` and
+  such; it's not necessary to use this for env vars like `TARGET` that Cargo
+  sets. Also note that if `rerun-if-env-changed` is printed out then Cargo will
+  *only* rerun the build script if those environment variables change or if
+  files printed out by `rerun-if-changed` change.
+
+* `warning=MESSAGE` is a message that will be printed to the main console after
+  a build script has finished running. Warnings are only shown for path
+  dependencies (that is, those you're working on locally), so for example
+  warnings printed out in crates.io crates are not emitted by default.
 
 Any other element is user-defined metadata that will be passed to
-dependencies. More information about this can be found in the [`links`][links]
+dependents. More information about this can be found in the [`links`][links]
 section.
 
-## Build Dependencies
+[links]: #the-links-manifest-key
+
+### Build Dependencies
 
 Build scripts are also allowed to have dependencies on other Cargo-based
 crates. Dependencies are declared through the `build-dependencies` section of
 the manifest.
 
 ```toml
-[build-dependencies.foo]
-git = "https://github.com/your-packages/foo"
+[build-dependencies]
+foo = { git = "https://github.com/your-packages/foo" }
 ```
 
 The build script **does not** have access to the dependencies listed in the
-`dependencies` or `dev-dependencies` section (they're not built yet!). All build
+`dependencies` or `dev-dependencies` section (they’re not built yet!). All build
 dependencies will also not be available to the package itself unless explicitly
 stated as such.
 
-## The `links` Manifest Key
+### The `links` Manifest Key
 
 In addition to the manifest key `build`, Cargo also supports a `links` manifest
 key to declare the name of a native library that is being linked to:
@@ -111,11 +170,11 @@ of native dependencies that a package has, as well as providing a principled
 system of passing metadata between package build scripts.
 
 Primarily, Cargo requires that there is at most one package per `links` value.
-In other words, it's forbidden to have two packages link to the same native
+In other words, it’s forbidden to have two packages link to the same native
 library. Note, however, that there are [conventions in place][star-sys] to
 alleviate this.
 
-[star-sys]: #*-sys-packages
+[star-sys]: #-sys-packages
 
 As mentioned above in the output format, each build script can generate an
 arbitrary set of metadata in the form of key-value pairs. This metadata is
@@ -128,7 +187,7 @@ Note that metadata is only passed to immediate dependents, not transitive
 dependents. The motivation for this metadata passing is outlined in the linking
 to system libraries case study below.
 
-## Overriding Build Scripts
+### Overriding Build Scripts
 
 If a manifest contains a `links` key, then Cargo supports overriding the build
 script specified with a custom library.
The purpose of this functionality is to
 prevent running the build script in question altogether and instead supply the
 metadata ahead of time.
 
 To override a build script, place the following configuration in any acceptable
-Cargo [configuration location](config.html).
+Cargo [configuration location](config.md).
 
 ```toml
 [target.x86_64-unknown-linux-gnu.foo]
@@ -156,15 +215,15 @@ With this configuration, if a package declares that it links to `foo` then the
 build script will **not** be compiled or run, and the metadata specified will
 instead be used.
 
-# Case study: Code generation
+### Case study: Code generation
 
 Some Cargo packages need to have code generated just before they are compiled
-for various reasons. Here we'll walk through a simple example which generates a
+for various reasons. Here we’ll walk through a simple example which generates a
 library call as part of the build script.
 
-First, let's take a look at the directory structure of this package:
+First, let’s take a look at the directory structure of this package:
 
-```notrust
+```
 .
 ├── Cargo.toml
 ├── build.rs
@@ -175,7 +234,7 @@ First, let's take a look at the directory structure of this package:
 ```
 
 Here we can see that we have a `build.rs` build script and our binary in
-`main.rs`. Next, let's take a look at the manifest:
+`main.rs`. Next, let’s take a look at the manifest:
 
 ```toml
 # Cargo.toml
@@ -187,8 +246,8 @@ authors = ["you@example.com"]
 build = "build.rs"
 ```
 
-Here we can see we've got a build script specified which we'll use to generate
-some code. Let's see what's inside the build script:
+Here we can see we’ve got a build script specified which we’ll use to generate
+some code. Let’s see what’s inside the build script:
 
 ```rust,no_run
 // build.rs
@@ -211,18 +270,23 @@ fn main() {
 }
 
-There's a couple of points of note here:
+There are a couple of points of note here:
 
 * The script uses the `OUT_DIR` environment variable to discover where the
-  output files should be located. It can use the process's current working
+  output files should be located. It can use the process’ current working
   directory to find where the input files should be located, but in this case we
-  don't have any input files.
+  don’t have any input files.
+* In general, build scripts should not modify any files outside of `OUT_DIR`.
+  It may seem fine at first blush, but it does cause problems when you use
+  such a crate as a dependency, because there's an *implicit* invariant that
+  sources in `.cargo/registry` should be immutable. `cargo` won't allow such
+  scripts when packaging.
* This script is relatively simple as it just writes out a small generated file.
   One could imagine that other more fanciful operations could take place such as
   generating a Rust module from a C header file or another language definition,
   for example.
 
-Next, let's peek at the library itself:
+Next, let’s peek at the library itself:
 
 ```rust,ignore
 // src/main.rs
@@ -236,23 +300,21 @@ fn main() {
 
 This is where the real magic happens. The library is using the rustc-defined
 `include!` macro in combination with the `concat!` and `env!` macros to include
-the generated file (`mod.rs`) into the crate's compilation.
+the generated file (`hello.rs`) into the crate’s compilation.
 
 Using the structure shown here, crates can include any number of generated files
-from the build script itself. We've also seen a brief example of how a build
-script can use a crate as a dependency purely for the build process and not for
-the crate itself at runtime.
+from the build script itself. -# Case study: Building some native code +### Case study: Building some native code -Sometimes it's necessary to build some native C or C++ code as part of a +Sometimes it’s necessary to build some native C or C++ code as part of a package. This is another excellent use case of leveraging the build script to -build a native library before the Rust crate itself. As an example, we'll create -a Rust library which calls into C to print "Hello, World!". +build a native library before the Rust crate itself. As an example, we’ll create +a Rust library which calls into C to print “Hello, World!”. -Like above, let's first take a look at the project layout: +Like above, let’s first take a look at the package layout: -```notrust +``` . ├── Cargo.toml ├── build.rs @@ -275,7 +337,7 @@ authors = ["you@example.com"] build = "build.rs" ``` -For now we're not going to use any build dependencies, so let's take a look at +For now we’re not going to use any build dependencies, so let’s take a look at the build script now: ```rust,no_run @@ -302,7 +364,7 @@ fn main() { } ``` -This build script starts out by compiling out C file into an object file (by +This build script starts out by compiling our C file into an object file (by invoking `gcc`) and then converting this object file into a static library (by invoking `ar`). The final step is feedback to Cargo itself to say that our output was in `out_dir` and the compiler should link the crate to `libhello.a` @@ -310,11 +372,11 @@ statically via the `-l static=hello` flag. Note that there are a number of drawbacks to this hardcoded approach: -* The `gcc` command itself is not portable across platforms. For example it's +* The `gcc` command itself is not portable across platforms. For example it’s unlikely that Windows platforms have `gcc`, and not even all Unix platforms may have `gcc`. The `ar` command is also in a similar situation. -* These commands do not take cross-compilation into account. If we're cross - compiling for a platform such as Android it's unlikely that `gcc` will produce +* These commands do not take cross-compilation into account. If we’re cross + compiling for a platform such as Android it’s unlikely that `gcc` will produce an ARM executable. Not to fear, though, this is where a `build-dependencies` entry would help! The @@ -324,40 +386,42 @@ portable, and standardized. For example, the build script could be written as: ```rust,ignore // build.rs -// Bring in a dependency on an externally maintained `gcc` package which manages +// Bring in a dependency on an externally maintained `cc` package which manages // invoking the C compiler. -extern crate gcc; +extern crate cc; fn main() { - gcc::compile_library("libhello.a", &["src/hello.c"]).unwrap(); + cc::Build::new() + .file("src/hello.c") + .compile("hello"); } ``` -Add a build time dependency on the `gcc` crate with the following addition to +Add a build time dependency on the `cc` crate with the following addition to your `Cargo.toml`: ```toml [build-dependencies] -gcc = "0.3" +cc = "1.0" ``` -The [`gcc` crate](https://crates.io/crates/gcc) abstracts a range of build +The [`cc` crate](https://crates.io/crates/cc) abstracts a range of build script requirements for C code: * It invokes the appropriate compiler (MSVC for windows, `gcc` for MinGW, `cc` - for Unix platforms, etc). + for Unix platforms, etc.). * It takes the `TARGET` variable into account by passing appropriate flags to the compiler being used. 
-* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc, are all +* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all handled automatically. -* The stdout output and `OUT_DIR` locations are also handled by the `gcc` +* The stdout output and `OUT_DIR` locations are also handled by the `cc` library. Here we can start to see some of the major benefits of farming as much functionality as possible out to common build dependencies rather than duplicating logic across all build scripts! -Back to the case study though, let's take a quick look at the contents of the +Back to the case study though, let’s take a quick look at the contents of the `src` directory: ```c @@ -373,7 +437,7 @@ void hello() { ```rust,ignore // src/main.rs -// Note the lack of the `#[link]` attribute. We're delegating the responsibility +// Note the lack of the `#[link]` attribute. We’re delegating the responsibility // of selecting what to link to over to the build script rather than hardcoding // it in the source file. extern { fn hello(); } @@ -387,7 +451,10 @@ And there we go! This should complete our example of building some C code from a Cargo package using the build script itself. This also shows why using a build dependency can be crucial in many situations and even much more concise! -# Case study: Linking to system libraries +We’ve also seen a brief example of how a build script can use a crate as a +dependency purely for the build process and not for the crate itself at runtime. + +### Case study: Linking to system libraries The final case study here will be investigating how a Cargo library links to a system library and how the build script is leveraged to support this use case. @@ -399,8 +466,8 @@ performing this in a platform-agnostic fashion, and the purpose of a build script is again to farm out as much of this as possible to make this as easy as possible for consumers. -As an example to follow, let's take a look at one of [Cargo's own -dependencies][git2-rs], [libgit2][libgit2]. This library has a number of +As an example to follow, let’s take a look at one of [Cargo’s own +dependencies][git2-rs], [libgit2][libgit2]. The C library has a number of constraints: [git2-rs]: https://github.com/alexcrichton/git2-rs/tree/master/libgit2-sys @@ -413,8 +480,8 @@ constraints: * It is often not installed on all systems by default. * It can be built from source using `cmake`. -To visualize what's going on here, let's take a look at the manifest for the -relevant Cargo package. +To visualize what’s going on here, let’s take a look at the manifest for the +relevant Cargo package that links to the native C library. ```toml [package] @@ -424,36 +491,37 @@ authors = ["..."] links = "git2" build = "build.rs" -[dependencies.libssh2-sys] -git = "https://github.com/alexcrichton/ssh2-rs" +[dependencies] +libssh2-sys = { git = "https://github.com/alexcrichton/ssh2-rs" } -[target.x86_64-unknown-linux-gnu.dependencies.openssl-sys] -git = "https://github.com/alexcrichton/openssl-sys" +[target.'cfg(unix)'.dependencies] +openssl-sys = { git = "https://github.com/alexcrichton/openssl-sys" } # ... ``` -As the above manifests show, we've got a `build` script specified, but it's +As the above manifests show, we’ve got a `build` script specified, but it’s worth noting that this example has a `links` entry which indicates that the crate (`libgit2-sys`) links to the `git2` native library. 
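To make that contract concrete, here is a minimal, hedged sketch (not the real `libgit2-sys` build script, which probes `pkg-config` and can build from source) of the kind of directives such a build script prints, using the output format described earlier; the paths are placeholders:

```rust
// build.rs (illustrative only). Lines starting with `cargo:` are
// interpreted by Cargo itself when printed to stdout.
fn main() {
    // Tell rustc where to search and which native library to link.
    println!("cargo:rustc-link-search=native=/path/to/lib");
    println!("cargo:rustc-link-lib=static=git2");

    // Arbitrary metadata; because the manifest says `links = "git2"`,
    // immediate dependents can read this as the DEP_GIT2_ROOT variable.
    println!("cargo:root=/path/to/libgit2");
}
```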
-Here we also see the unconditional dependency on `libssh2` via the
-`libssh2-sys` crate, as well as a platform-specific dependency on `openssl-sys`
-for unix (other variants elided for now). It may seem a little counterintuitive
-to express *C dependencies* in the *Cargo manifest*, but this is actually using
-one of Cargo's conventions in this space.
+Here we also see that we chose to have the Rust crate have an unconditional
+dependency on `libssh2` via the `libssh2-sys` crate, as well as a
+platform-specific dependency on `openssl-sys` for \*nix (other variants elided
+for now). It may seem a little counterintuitive to express *C dependencies* in
+the *Cargo manifest*, but this is actually using one of Cargo’s conventions in
+this space.
 
-## `*-sys` Packages
+### `*-sys` Packages
 
-To alleviate linking to system libraries, Cargo has a *convention* of package
-naming and functionality. Any package named `foo-sys` will provide two major
+To alleviate linking to system libraries, crates.io has a *convention* of package
+naming and functionality. Any package named `foo-sys` should provide two major
 pieces of functionality:
 
-* The library crate will link to the native library `libfoo`. This will often
+* The library crate should link to the native library `libfoo`. This will often
   probe the current system for `libfoo` before resorting to building from
   source.
-* The library crate will provide **declarations** for functions in `libfoo`,
-  but it does **not** provide bindings or higher-level abstractions.
+* The library crate should provide **declarations** for functions in `libfoo`,
+  but **not** bindings or higher-level abstractions.
 
 The set of `*-sys` packages provides a common set of dependencies for linking
 to native libraries. There are a number of benefits earned from having this
@@ -465,42 +533,42 @@ convention of native-library-related packages:
 (or building it from source).
 * These dependencies are easily overridable.
 
-## Building libgit2
+### Building libgit2
 
-Now that we've got libgit2's dependencies sorted out, we need to actually write
-the build script. We're not going to look at specific snippets of code here and
+Now that we’ve got libgit2’s dependencies sorted out, we need to actually write
+the build script. We’re not going to look at specific snippets of code here and
 instead only take a look at the high-level details of the build script of
 `libgit2-sys`. This is not a recommendation that all packages follow this
 strategy, but rather just an outline of one specific strategy.
 
 The first step the build script should take is to query whether libgit2 is
-already installed on the host system. To do this we'll leverage the preexisting
-tool `pkg-config` (when its available). We'll also use a `build-dependencies`
-section to refactor out all the `pkg-config` related code (or someone's already
+already installed on the host system. To do this we’ll leverage the preexisting
+tool `pkg-config` (when it’s available). We’ll also use a `build-dependencies`
+section to refactor out all the `pkg-config` related code (or someone’s already
 done that!).
 
-If `pkg-config` failed to find libgit2, or if `pkg-config` just wasn't
+If `pkg-config` failed to find libgit2, or if `pkg-config` just wasn’t
 installed, the next step is to build libgit2 from bundled source code
 (distributed as part of `libgit2-sys` itself). There are a few nuances when
 doing so that we need to take into account, however:
 
-* The build system of libgit2, `cmake`, needs to be able to find libgit2's
-  optional dependency of libssh2.
We're sure we've already built it (it's a
+* The build system of libgit2, `cmake`, needs to be able to find libgit2’s
+  optional dependency of libssh2. We’re sure we’ve already built it (it’s a
   Cargo dependency), we just need to communicate this information. To do this
   we leverage the metadata format to communicate information between build
   scripts. In this example the libssh2 package printed out `cargo:root=...` to
   tell us where libssh2 is installed, and we can then pass this along to
   cmake with the `CMAKE_PREFIX_PATH` environment variable.
-* We'll need to handle some `CFLAGS` values when compiling C code (and tell
+* We’ll need to handle some `CFLAGS` values when compiling C code (and tell
   `cmake` about this). Some flags we may want to pass are `-m64` for 64-bit
   code, `-m32` for 32-bit code, or `-fPIC` for 64-bit code as well.
-* Finally, we'll invoke `cmake` to place all output into the `OUT_DIR`
-  environment variable, and then we'll print the necessary metadata to instruct
+* Finally, we’ll invoke `cmake` to place all output into the directory given
+  by the `OUT_DIR` environment variable, and then we’ll print the necessary
+  metadata to instruct
   rustc how to link to libgit2.
 
 Most of the functionality of this build script is easily refactorable into
-common dependencies, so our build script isn't quite as intimidating as this
-descriptions! In reality it's expected that build scripts are quite succinct by
+common dependencies, so our build script isn’t quite as intimidating as this
+description! In reality it’s expected that build scripts are quite succinct by
 farming out logic such as the above to build dependencies.
diff --git a/src/doc/src/reference/config.md b/src/doc/src/reference/config.md
new file mode 100644
index 00000000000..1d9a55cdc57
--- /dev/null
+++ b/src/doc/src/reference/config.md
@@ -0,0 +1,202 @@
+## Configuration
+
+This document will explain how Cargo’s configuration system works, as well as
+the available configuration keys. For configuration of a package through its
+manifest, see the [manifest format](manifest.md).
+
+### Hierarchical structure
+
+Cargo allows local configuration for a particular package as well as global
+configuration, like git. Cargo extends this to a hierarchical strategy.
+If, for example, Cargo were invoked in `/projects/foo/bar/baz`, then the
+following configuration files would be probed for and unified in this order:
+
+* `/projects/foo/bar/baz/.cargo/config`
+* `/projects/foo/bar/.cargo/config`
+* `/projects/foo/.cargo/config`
+* `/projects/.cargo/config`
+* `/.cargo/config`
+* `$CARGO_HOME/config` (`$CARGO_HOME` defaults to `$HOME/.cargo`)
+
+With this structure, you can specify configuration per-package, and even
+possibly check it into version control. You can also specify personal defaults
+with a configuration file in your home directory.
+
+### Configuration format
+
+All configuration is currently in the [TOML format][toml] (like the manifest),
+with simple key-value pairs inside of sections (tables) which all get merged
+together.
+
+[toml]: https://github.com/toml-lang/toml
+
+### Configuration keys
+
+All of the following keys are optional, and their defaults are listed as their
+value unless otherwise noted.
+
+Key values that specify a tool may be given as an absolute path, a relative path
+or as a pathless tool name. Absolute paths and pathless tool names are used as
+given. Relative paths are resolved relative to the parent directory of the
+`.cargo` directory of the config file that the value resides within.
+
+```toml
+# An array of paths to local repositories which are to be used as overrides for
+# dependencies. For more information see the Specifying Dependencies guide.
+paths = ["/path/to/override"]
+
+[cargo-new]
+# This is your name/email to place in the `authors` section of a new Cargo.toml
+# that is generated. If not present, then `git` will be probed, and if that is
+# not present then `$USER` and `$EMAIL` will be used.
+name = "..."
+email = "..."
+
+# By default `cargo new` will initialize a new Git repository. This key can
+# be set to change the version control system used. Valid values are `git`,
+# `hg` (for Mercurial), `pijul`, `fossil`, or `none` to disable this behavior.
+vcs = "none"
+
+# For the following sections, $triple refers to any valid target triple, not the
+# literal string "$triple", and it will apply whenever that target triple is
+# being compiled to. 'cfg(...)' refers to the Rust-like `#[cfg]` syntax for
+# conditional compilation.
+[target.$triple]
+# This is the linker which is passed to rustc (via `-C linker=`) when the `$triple`
+# is being compiled for. By default this flag is not passed to the compiler.
+linker = ".."
+# Same but for the library archiver which is passed to rustc via `-C ar=`.
+ar = ".."
+# If a runner is provided, compiled targets for the `$triple` will be executed
+# by invoking the specified runner executable with the actual target as first argument.
+# This applies to `cargo run`, `cargo test` and `cargo bench` commands.
+# By default compiled targets are executed directly.
+runner = ".."
+# Custom flags to pass to all compiler invocations that target $triple.
+# This value overrides build.rustflags when both are present.
+rustflags = ["..", ".."]
+
+[target.'cfg(...)']
+# Similar to the $triple configuration, but using the `cfg` syntax.
+# If several `cfg` and $triple targets are candidates, then the rustflags
+# are concatenated. The `cfg` syntax only applies to rustflags, and not to
+# linker.
+rustflags = ["..", ".."]
+# Similar to the $triple configuration, but using the `cfg` syntax.
+# If one or more `cfg`s and a $triple target are candidates, then the $triple
+# will be used.
+# If several `cfg`s are candidates, then the build will error.
+runner = ".."
+
+# Configuration keys related to the registry
+[registry]
+index = "..."   # URL of the registry index (defaults to the index of crates.io)
+default = "..." # Name of the default registry to use (can be overridden with
+                # --registry)
+
+# Configuration keys for registries other than crates.io.
+# `$name` should be the name of the registry, which will be used for
+# dependencies in `Cargo.toml` files and the `--registry` command-line flag.
+# Registry names should only contain alphanumeric characters, `-`, or `_`.
+[registries.$name]
+index = "..."       # URL of the registry index
+
+[http]
+proxy = "host:port" # HTTP proxy to use for HTTP requests (defaults to none)
+                    # in libcurl format, e.g., "socks5h://host:port"
+timeout = 30        # Timeout for each HTTP request, in seconds
+cainfo = "cert.pem" # Path to Certificate Authority (CA) bundle (optional)
+check-revoke = true # Indicates whether SSL certs are checked for revocation
+low-speed-limit = 5 # Lower threshold for bytes/sec (10 = default, 0 = disabled)
+multiplexing = true # whether or not to use HTTP/2 multiplexing where possible
+
+# This setting can be used to help debug what's going on with HTTP requests made
+# by Cargo.
When set to `true` then Cargo's normal debug logging will be filled
+# in with HTTP information, which you can extract with
+# `CARGO_LOG=cargo::ops::registry=debug` (and `trace` may print more).
+#
+# Be wary when posting these logs elsewhere though, it may be the case that a
+# header has an authentication token in it you don't want leaked! Be sure to
+# briefly review logs before posting them.
+debug = false
+
+[build]
+jobs = 1                    # number of parallel jobs, defaults to # of CPUs
+rustc = "rustc"             # the rust compiler tool
+rustdoc = "rustdoc"         # the doc generator tool
+target = "triple"           # build for the target triple (ignored by `cargo install`)
+target-dir = "target"       # path of where to place all generated artifacts
+rustflags = ["..", ".."]    # custom flags to pass to all compiler invocations
+rustdocflags = ["..", ".."] # custom flags to pass to rustdoc
+incremental = true          # whether or not to enable incremental compilation
+                            # If `incremental` is not set, then the value from
+                            # the profile is used.
+dep-info-basedir = ".."     # full path for the base directory for targets in depfiles
+
+[term]
+verbose = false # whether cargo provides verbose output
+color = 'auto'  # whether cargo colorizes output
+
+# Network configuration
+[net]
+retry = 2 # number of times a network call will be automatically retried
+git-fetch-with-cli = false # if `true` we'll use `git`-the-CLI to fetch git repos
+offline = false # do not access the network, but otherwise try to proceed if possible
+
+# Alias cargo commands. The first 4 aliases are built in. If your
+# command requires grouped whitespace, use the list format.
+[alias]
+b = "build"
+c = "check"
+t = "test"
+r = "run"
+rr = "run --release"
+space_example = ["run", "--release", "--", "\"command list\""]
+```
+
+### Environment variables
+
+Cargo can also be configured through environment variables in addition to the
+TOML syntax above. For each configuration key above of the form `foo.bar` the
+environment variable `CARGO_FOO_BAR` can also be used to define the value. For
+example the `build.jobs` key can also be defined by `CARGO_BUILD_JOBS`.
+
+Environment variables will take precedence over TOML configuration, and currently
+only integer, boolean, and string keys can be defined via
+environment variables. This means that [source replacement][source], which is expressed by
+tables, cannot be configured through environment variables.
+
+In addition to the system above, Cargo recognizes a few other specific
+[environment variables][env].
+
+### Credentials
+
+Configuration values with sensitive information are stored in the
+`$CARGO_HOME/credentials` file. This file is automatically created and updated
+by [`cargo login`]. It follows the same format as Cargo config files.
+
+```toml
+[registry]
+token = "..."   # Access token for crates.io
+
+# `$name` should be a registry name (see above for more information about
+# configuring registries).
+[registries.$name]
+token = "..."   # Access token for the named registry
+```
+
+Tokens are used by some Cargo commands such as [`cargo publish`] for
+authenticating with remote registries. Care should be taken to protect the
+tokens and to keep them secret.
+
+As with most other config values, tokens may be specified with environment
+variables. The token for crates.io may be specified with the
+`CARGO_REGISTRY_TOKEN` environment variable.
Tokens for other registries may +be specified with environment variables of the form +`CARGO_REGISTRIES_NAME_TOKEN` where `NAME` is the name of the registry in all +capital letters. + +[`cargo login`]: ../commands/cargo-login.md +[`cargo publish`]: ../commands/cargo-publish.md +[env]: environment-variables.md +[source]: source-replacement.md diff --git a/src/doc/src/reference/environment-variables.md b/src/doc/src/reference/environment-variables.md new file mode 100644 index 00000000000..7f63a48d652 --- /dev/null +++ b/src/doc/src/reference/environment-variables.md @@ -0,0 +1,144 @@ +## Environment Variables + +Cargo sets and reads a number of environment variables which your code can detect +or override. Here is a list of the variables Cargo sets, organized by when it interacts +with them: + +### Environment variables Cargo reads + +You can override these environment variables to change Cargo's behavior on your +system: + +* `CARGO_HOME` — Cargo maintains a local cache of the registry index and of git + checkouts of crates. By default these are stored under `$HOME/.cargo`, but + this variable overrides the location of this directory. Once a crate is cached + it is not removed by the clean command. +* `CARGO_TARGET_DIR` — Location of where to place all generated artifacts, + relative to the current working directory. +* `RUSTC` — Instead of running `rustc`, Cargo will execute this specified + compiler instead. +* `RUSTC_WRAPPER` — Instead of simply running `rustc`, Cargo will execute this + specified wrapper instead, passing as its commandline arguments the rustc + invocation, with the first argument being rustc. +* `RUSTDOC` — Instead of running `rustdoc`, Cargo will execute this specified + `rustdoc` instance instead. +* `RUSTDOCFLAGS` — A space-separated list of custom flags to pass to all `rustdoc` + invocations that Cargo performs. In contrast with `cargo rustdoc`, this is + useful for passing a flag to *all* `rustdoc` instances. +* `RUSTFLAGS` — A space-separated list of custom flags to pass to all compiler + invocations that Cargo performs. In contrast with `cargo rustc`, this is + useful for passing a flag to *all* compiler instances. +* `CARGO_INCREMENTAL` — If this is set to 1 then Cargo will force incremental + compilation to be enabled for the current compilation, and when set to 0 it + will force disabling it. If this env var isn't present then cargo's defaults + will otherwise be used. +* `CARGO_CACHE_RUSTC_INFO` — If this is set to 0 then Cargo will not try to cache + compiler version information. + +Note that Cargo will also read environment variables for `.cargo/config` +configuration values, as described in [that documentation][config-env] + +[config-env]: config.md#environment-variables + +### Environment variables Cargo sets for crates + +Cargo exposes these environment variables to your crate when it is compiled. +Note that this applies for test binaries as well. +To get the value of any of these variables in a Rust program, do this: + +```rust +let version = env!("CARGO_PKG_VERSION"); +``` + +`version` will now contain the value of `CARGO_PKG_VERSION`. + +* `CARGO` - Path to the `cargo` binary performing the build. +* `CARGO_MANIFEST_DIR` - The directory containing the manifest of your package. +* `CARGO_PKG_VERSION` - The full version of your package. +* `CARGO_PKG_VERSION_MAJOR` - The major version of your package. +* `CARGO_PKG_VERSION_MINOR` - The minor version of your package. +* `CARGO_PKG_VERSION_PATCH` - The patch version of your package. 
+
+* `CARGO_PKG_VERSION_PRE` - The pre-release version of your package.
+* `CARGO_PKG_AUTHORS` - Colon separated list of authors from the manifest of your package.
+* `CARGO_PKG_NAME` - The name of your package.
+* `CARGO_PKG_DESCRIPTION` - The description from the manifest of your package.
+* `CARGO_PKG_HOMEPAGE` - The home page from the manifest of your package.
+* `CARGO_PKG_REPOSITORY` - The repository from the manifest of your package.
+* `OUT_DIR` - If the package has a build script, this is set to the folder where the build
+              script should place its output. See below for more information.
+
+### Environment variables Cargo sets for build scripts
+
+Cargo sets several environment variables when build scripts are run. Because these variables
+are not yet set when the build script is compiled, the above example using `env!` won't work
+and instead you'll need to retrieve the values when the build script is run:
+
+```rust
+use std::env;
+let out_dir = env::var("OUT_DIR").unwrap();
+```
+
+`out_dir` will now contain the value of `OUT_DIR`.
+
+* `CARGO` - Path to the `cargo` binary performing the build.
+* `CARGO_MANIFEST_DIR` - The directory containing the manifest for the package
+                         being built (the package containing the build
+                         script). Also note that this is the value of the
+                         current working directory of the build script when it
+                         starts.
+* `CARGO_MANIFEST_LINKS` - the manifest `links` value.
+* `CARGO_FEATURE_<name>` - For each activated feature of the package being
+                           built, this environment variable will be present
+                           where `<name>` is the name of the feature uppercased
+                           and having `-` translated to `_`.
+* `CARGO_CFG_<cfg>` - For each [configuration option][configuration] of the
+                      package being built, this environment variable will
+                      contain the value of the configuration, where `<cfg>` is
+                      the name of the configuration uppercased and having `-`
+                      translated to `_`.
+                      Boolean configurations are present if they are set, and
+                      not present otherwise.
+                      Configurations with multiple values are joined to a
+                      single variable with the values delimited by `,`.
+* `OUT_DIR` - the folder in which all output should be placed. This folder is
+              inside the build directory for the package being built, and it is
+              unique for the package in question.
+* `TARGET` - the target triple that is being compiled for. Native code should be
+             compiled for this triple. See the [Target Triple] description
+             for more information.
+* `HOST` - the host triple of the rust compiler.
+* `NUM_JOBS` - the parallelism specified as the top-level parallelism. This can
+               be useful to pass a `-j` parameter to a system like `make`. Note
+               that care should be taken when interpreting this environment
+               variable. For historical purposes this is still provided but
+               recent versions of Cargo, for example, do not need to run `make
+               -j` as it'll automatically happen. Cargo implements its own
+               [jobserver] and will allow build scripts to inherit this
+               information, so programs compatible with GNU make jobservers will
+               already have appropriately configured parallelism.
+* `OPT_LEVEL`, `DEBUG` - values of the corresponding variables for the
+                         profile currently being built.
+* `PROFILE` - `release` for release builds, `debug` for other builds.
+* `DEP_<name>_<key>` - For more information about this set of environment
+                       variables, see build script documentation about [`links`][links].
+* `RUSTC`, `RUSTDOC` - the compiler and documentation generator that Cargo has
+                       resolved to use, passed to the build script so it might
+                       use it as well.
+
+* `RUSTC_LINKER` - The path to the linker binary that Cargo has resolved to use
+                   for the current target, if specified. The linker can be
+                   changed by editing `.cargo/config`; see the documentation
+                   about [cargo configuration][cargo-config] for more
+                   information.
+
+[links]: build-scripts.md#the-links-manifest-key
+[configuration]: ../../reference/conditional-compilation.html
+[jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html
+[cargo-config]: config.md
+[Target Triple]: ../appendix/glossary.md#target
+
+### Environment variables Cargo sets for 3rd party subcommands
+
+Cargo exposes this environment variable to 3rd party subcommands
+(i.e., programs named `cargo-foobar` placed in `$PATH`):
+
+* `CARGO` - Path to the `cargo` binary performing the build.
diff --git a/src/doc/src/reference/external-tools.md b/src/doc/src/reference/external-tools.md
new file mode 100644
index 00000000000..ea055fcc905
--- /dev/null
+++ b/src/doc/src/reference/external-tools.md
@@ -0,0 +1,114 @@
+## External tools
+
+One of the goals of Cargo is simple integration with third-party tools, like
+IDEs and other build systems. To make integration easier, Cargo has several
+facilities:
+
+* a `cargo metadata` command, which outputs package structure and dependencies
+  information in JSON,
+
+* a `--message-format` flag, which outputs information about a particular build,
+  and
+
+* support for custom subcommands.
+
+
+### Information about package structure
+
+You can use the `cargo metadata` command to get information about package structure
+and dependencies. The output of the command looks like this:
+
+```text
+{
+  // Integer version number of the format.
+  "version": integer,
+
+  // List of packages for this workspace, including dependencies.
+  "packages": [
+    {
+      // Opaque package identifier.
+      "id": PackageId,
+
+      "name": string,
+
+      "version": string,
+
+      "source": SourceId,
+
+      // A list of declared dependencies, see `resolve` field for actual dependencies.
+      "dependencies": [ Dependency ],
+
+      "targets": [ Target ],
+
+      // Path to Cargo.toml
+      "manifest_path": string,
+    }
+  ],
+
+  "workspace_members": [ PackageId ],
+
+  // Dependencies graph.
+  "resolve": {
+     "nodes": [
+       {
+         "id": PackageId,
+         "dependencies": [ PackageId ]
+       }
+     ]
+  }
+}
+```
+
+The format is stable and versioned. When calling `cargo metadata`, you should
+pass the `--format-version` flag explicitly to avoid forward incompatibility
+hazards.
+
+If you are using Rust, there is the [cargo_metadata] crate.
+
+[cargo_metadata]: https://crates.io/crates/cargo_metadata
+
+
+### Information about build
+
+When passing `--message-format=json`, Cargo will output the following
+information during the build:
+
+* compiler errors and warnings,
+
+* produced artifacts,
+
+* results of the build scripts (for example, native dependencies).
+
+The output goes to stdout, one JSON object per line. The `reason` field
+distinguishes different kinds of messages.
+
+Information about dependencies in a Makefile-compatible format is stored in
+the `.d` files alongside the artifacts.
+
+
+### Custom subcommands
+
+Cargo is designed to be extensible with new subcommands without having to modify
+Cargo itself. This is achieved by translating a cargo invocation of the form
+cargo `(?<command>[^ ]+)` into an invocation of an external tool
+`cargo-${command}`. The external tool must be present in one of the user's
+`$PATH` directories.
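To make the dispatch concrete, here is a minimal, hedged sketch of such an external tool (the name `cargo-foobar` is purely illustrative); it relies only on the argument layout described in the next paragraph:

```rust
// A hypothetical `cargo-foobar` binary. Invoked as `cargo foobar --baz`,
// its arguments are: the path to this binary, then "foobar", then "--baz".
use std::env;

fn main() {
    // Skip the binary path and the subcommand name; keep the user's flags.
    let args: Vec<String> = env::args().skip(2).collect();
    println!("cargo-foobar invoked with {:?}", args);
}
```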
+
+When Cargo invokes a custom subcommand, the first argument to the subcommand
+will be the filename of the custom subcommand, as usual. The second argument
+will be the subcommand name itself. For example, the second argument would be
+`${command}` when invoking `cargo-${command}`. Any additional arguments on the
+command line will be forwarded unchanged.
+
+Cargo can also display the help output of a custom subcommand with `cargo help
+${command}`. Cargo assumes that the subcommand will print a help message if its
+third argument is `--help`. So, `cargo help ${command}` would invoke
+`cargo-${command} ${command} --help`.
+
+Custom subcommands may use the `CARGO` environment variable to call back to
+Cargo. Alternatively, they can link to the `cargo` crate as a library, but this
+approach has drawbacks:
+
+* Cargo as a library is unstable: the API may change without deprecation
+
+* versions of the linked Cargo library may be different from the Cargo binary
diff --git a/src/doc/src/reference/index.md b/src/doc/src/reference/index.md
new file mode 100644
index 00000000000..a9bf76c8537
--- /dev/null
+++ b/src/doc/src/reference/index.md
@@ -0,0 +1,14 @@
+## Cargo Reference
+
+The reference covers the details of various areas of Cargo.
+
+* [Specifying Dependencies](specifying-dependencies.md)
+* [The Manifest Format](manifest.md)
+* [Configuration](config.md)
+* [Environment Variables](environment-variables.md)
+* [Build Scripts](build-scripts.md)
+* [Publishing on crates.io](publishing.md)
+* [Package ID Specifications](pkgid-spec.md)
+* [Source Replacement](source-replacement.md)
+* [External Tools](external-tools.md)
+* [Unstable Features](unstable.md)
diff --git a/src/doc/src/reference/manifest.md b/src/doc/src/reference/manifest.md
new file mode 100644
index 00000000000..ea60e8c5578
--- /dev/null
+++ b/src/doc/src/reference/manifest.md
@@ -0,0 +1,990 @@
+## The Manifest Format
+
+The `Cargo.toml` file for each package is called its *manifest*. Every manifest
+file consists of one or more sections.
+
+### The `[package]` section
+
+The first section in a `Cargo.toml` is `[package]`.
+
+```toml
+[package]
+name = "hello_world" # the name of the package
+version = "0.1.0"    # the current version, obeying semver
+authors = ["Alice <a@example.com>", "Bob <b@example.com>"]
+```
+
+#### The `name` field
+
+The package name is an identifier used to refer to the package. It is used
+when listed as a dependency in another package, and as the default name of
+inferred lib and bin targets.
+
+The name must not be empty, and must use only [alphanumeric] characters, `-`, or `_`.
+Note that [`cargo new`] and [`cargo init`] impose some additional restrictions on
+the package name, such as enforcing that it is a valid Rust identifier and not
+a keyword. [crates.io][cratesio] imposes even more restrictions, such as
+enforcing only ASCII characters, not a reserved name, not a special Windows
+name such as "nul", not too long, etc.
+
+[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric
+
+#### The `version` field
+
+Cargo bakes in the concept of [Semantic
+Versioning](https://semver.org/), so make sure you follow some basic rules:
+
+* Before you reach 1.0.0, anything goes, but if you make breaking changes,
+  increment the minor version. In Rust, breaking changes include adding fields to
+  structs or variants to enums.
+* After 1.0.0, only make breaking changes when you increment the major version.
+  Don’t break the build.
+* After 1.0.0, don’t add any new public API (no new `pub` anything) in patch-level
+  versions.
Always increment the minor version if you add any new `pub` structs, + traits, fields, types, functions, methods or anything else. +* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0. + +#### The `authors` field (optional) + +The `authors` field lists people or organizations that are considered the +"authors" of the package. The exact meaning is open to interpretation — it may +list the original or primary authors, current maintainers, or owners of the +package. These names will be listed on the crate's page on +[crates.io][cratesio]. An optional email address may be included within angled +brackets at the end of each author. + +#### The `edition` field (optional) + +You can opt in to a specific Rust Edition for your package with the +`edition` key in `Cargo.toml`. If you don't specify the edition, it will +default to 2015. + +```toml +[package] +# ... +edition = '2018' +``` + +The `edition` key affects which edition your package is compiled with. Cargo +will always generate packages via [`cargo new`] with the `edition` key set to the +latest edition. Setting the `edition` key in `[package]` will affect all +targets/crates in the package, including test suites, benchmarks, binaries, +examples, etc. + +#### The `build` field (optional) + +This field specifies a file in the package root which is a [build script] for +building native code. More information can be found in the [build script +guide][build script]. + +[build script]: build-scripts.md + +```toml +[package] +# ... +build = "build.rs" +``` + +#### The `links` field (optional) + +This field specifies the name of a native library that is being linked to. +More information can be found in the [`links`][links] section of the build +script guide. + +[links]: build-scripts.md#the-links-manifest-key + +```toml +[package] +# ... +links = "foo" +build = "build.rs" +``` + +#### The `documentation` field (optional) + +This field specifies a URL to a website hosting the crate's documentation. +If no URL is specified in the manifest file, [crates.io][cratesio] will +automatically link your crate to the corresponding [docs.rs][docsrs] page. + +Documentation links from specific hosts are blacklisted. Hosts are added +to the blacklist if they are known to not be hosting documentation and are +possibly of malicious intent e.g., ad tracking networks. URLs from the +following hosts are blacklisted: + +* rust-ci.org + +Documentation URLs from blacklisted hosts will not appear on crates.io, and +may be replaced by docs.rs links. + +[docsrs]: https://docs.rs/ +[cratesio]: https://crates.io/ + +#### The `exclude` and `include` fields (optional) + +You can explicitly specify that a set of file patterns should be ignored or +included for the purposes of packaging. The patterns specified in the +`exclude` field identify a set of files that are not included, and the +patterns in `include` specify files that are explicitly included. + +The patterns should be [gitignore]-style patterns. Briefly: + +- `foo` matches any file or directory with the name `foo` anywhere in the + package. This is equivalent to the pattern `**/foo`. +- `/foo` matches any file or directory with the name `foo` only in the root of + the package. +- `foo/` matches any *directory* with the name `foo` anywhere in the package. +- Common glob patterns like `*`, `?`, and `[]` are supported: + - `*` matches zero or more characters except `/`. For example, `*.html` + matches any file or directory with the `.html` extension anywhere in the + package. 
+ - `?` matches any character except `/`. For example, `foo?` matches `food`, + but not `foo`. + - `[]` allows for matching a range of characters. For example, `[ab]` + matches either `a` or `b`. `[a-z]` matches letters a through z. +- `**/` prefix matches in any directory. For example, `**/foo/bar` matches the + file or directory `bar` anywhere that is directly under directory `foo`. +- `/**` suffix matches everything inside. For example, `foo/**` matches all + files inside directory `foo`, including all files in subdirectories below + `foo`. +- `/**/` matches zero or more directories. For example, `a/**/b` matches + `a/b`, `a/x/b`, `a/x/y/b`, and so on. +- `!` prefix negates a pattern. For example, a pattern of `src/**.rs` and + `!foo.rs` would match all files with the `.rs` extension inside the `src` + directory, except for any file named `foo.rs`. + +If git is being used for a package, the `exclude` field will be seeded with +the `gitignore` settings from the repository. + +```toml +[package] +# ... +exclude = ["build/**/*.o", "doc/**/*.html"] +``` + +```toml +[package] +# ... +include = ["src/**/*", "Cargo.toml"] +``` + +The options are mutually exclusive: setting `include` will override an +`exclude`. Note that `include` must be an exhaustive list of files as otherwise +necessary source files may not be included. The package's `Cargo.toml` is +automatically included. + +The include/exclude list is also used for change tracking in some situations. +For targets built with `rustdoc`, it is used to determine the list of files to +track to determine if the target should be rebuilt. If the package has a +[build script] that does not emit any `rerun-if-*` directives, then the +include/exclude list is used for tracking if the build script should be re-run +if any of those files change. + +[gitignore]: https://git-scm.com/docs/gitignore + +#### The `publish` field (optional) + +The `publish` field can be used to prevent a package from being published to a +package registry (like *crates.io*) by mistake, for instance to keep a package +private in a company. + +```toml +[package] +# ... +publish = false +``` + +The value may also be an array of strings which are registry names that are +allowed to be published to. + +```toml +[package] +# ... +publish = ["some-registry-name"] +``` + +#### The `workspace` field (optional) + +The `workspace` field can be used to configure the workspace that this package +will be a member of. If not specified this will be inferred as the first +Cargo.toml with `[workspace]` upwards in the filesystem. + +```toml +[package] +# ... +workspace = "path/to/workspace/root" +``` + +For more information, see the documentation for the workspace table below. + +#### Package metadata + +There are a number of optional metadata fields also accepted under the +`[package]` section: + +```toml +[package] +# ... + +# A short blurb about the package. This is not rendered in any format when +# uploaded to crates.io (aka this is not markdown). +description = "..." + +# These URLs point to more information about the package. These are +# intended to be webviews of the relevant data, not necessarily compatible +# with VCS tools and the like. +documentation = "..." +homepage = "..." +repository = "..." + +# This points to a file under the package root (relative to this `Cargo.toml`). +# The contents of this file are stored and indexed in the registry. +# crates.io will render this file and place the result on the crate's page. +readme = "..." 
+
+# This is a list of up to five keywords that describe this crate. Keywords
+# are searchable on crates.io, and you may choose any words that would
+# help someone find this crate.
+keywords = ["...", "..."]
+
+# This is a list of up to five categories where this crate would fit.
+# Categories are a fixed list available at crates.io/category_slugs, and
+# they must match exactly.
+categories = ["...", "..."]
+
+# This is an SPDX 2.1 license expression for this package. Currently
+# crates.io will validate the license provided against a whitelist of
+# known license and exception identifiers from the SPDX license list
+# 2.4. Parentheses are not currently supported.
+#
+# Multiple licenses can be separated with a `/`, although that usage
+# is deprecated. Instead, use a license expression with AND and OR
+# operators to get more explicit semantics.
+license = "..."
+
+# If a package is using a nonstandard license, then this key may be specified in
+# lieu of the above key and must point to a file relative to this manifest
+# (similar to the readme key).
+license-file = "..."
+
+# Optional specification of badges to be displayed on crates.io.
+#
+# - The badges pertaining to build status that are currently available are
+#   Appveyor, CircleCI, Cirrus CI, GitLab, Azure DevOps and TravisCI.
+# - Available badges pertaining to code test coverage are Codecov and
+#   Coveralls.
+# - There are also maintenance-related badges based on isitmaintained.com
+#   which state the issue resolution time, percent of open issues, and future
+#   maintenance intentions.
+#
+# If a `repository` key is required, this refers to a repository in
+# `user/repo` format.
+[badges]
+
+# Appveyor: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`; `id` is optional; you can specify the appveyor project id if you
+# want to use that instead. `project_name` is optional; use when the repository
+# name differs from the appveyor project name.
+appveyor = { repository = "...", branch = "master", service = "github" }
+
+# Circle CI: `repository` is required. `branch` is optional; default is `master`
+circle-ci = { repository = "...", branch = "master" }
+
+# Cirrus CI: `repository` is required. `branch` is optional; default is `master`
+cirrus-ci = { repository = "...", branch = "master" }
+
+# GitLab: `repository` is required. `branch` is optional; default is `master`
+gitlab = { repository = "...", branch = "master" }
+
+# Azure DevOps: `project` is required. `pipeline` is required. `build` is optional; default is `1`
+# Note: project = `organization/project`, pipeline = `name_of_pipeline`, build = `definitionId`
+azure-devops = { project = "...", pipeline = "...", build="2" }
+
+# Travis CI: `repository` in format "<user>/<repo>" is required.
+# `branch` is optional; default is `master`
+travis-ci = { repository = "...", branch = "master" }
+
+# Codecov: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default), `bitbucket`, and
+# `gitlab`.
+codecov = { repository = "...", branch = "master", service = "github" }
+
+# Coveralls: `repository` is required. `branch` is optional; default is `master`
+# `service` is optional; valid values are `github` (default) and `bitbucket`.
+coveralls = { repository = "...", branch = "master", service = "github" }
+
+# Is it maintained resolution time: `repository` is required.
+is-it-maintained-issue-resolution = { repository = "..." } + +# Is it maintained percentage of open issues: `repository` is required. +is-it-maintained-open-issues = { repository = "..." } + +# Maintenance: `status` is required. Available options are: +# - `actively-developed`: New features are being added and bugs are being fixed. +# - `passively-maintained`: There are no plans for new features, but the maintainer intends to +# respond to issues that get filed. +# - `as-is`: The crate is feature complete, the maintainer does not intend to continue working on +# it or providing support, but it works for the purposes it was designed for. +# - `experimental`: The author wants to share it with the community but is not intending to meet +# anyone's particular use case. +# - `looking-for-maintainer`: The current maintainer would like to transfer the crate to someone +# else. +# - `deprecated`: The maintainer does not recommend using this crate (the description of the crate +# can describe why, there could be a better solution available or there could be problems with +# the crate that the author does not want to fix). +# - `none`: Displays no badge on crates.io, since the maintainer has not chosen to specify +# their intentions, potential crate users will need to investigate on their own. +maintenance = { status = "..." } +``` + +The [crates.io](https://crates.io) registry will render the description, display +the license, link to the three URLs and categorize by the keywords. These keys +provide useful information to users of the registry and also influence the +search ranking of a crate. It is highly discouraged to omit everything in a +published crate. + +SPDX 2.1 license expressions are documented +[here][spdx-2.1-license-expressions]. The current version of the +license list is available [here][spdx-license-list], and version 2.4 +is available [here][spdx-license-list-2.4]. + +#### The `metadata` table (optional) + +Cargo by default will warn about unused keys in `Cargo.toml` to assist in +detecting typos and such. The `package.metadata` table, however, is completely +ignored by Cargo and will not be warned about. This section can be used for +tools which would like to store package configuration in `Cargo.toml`. For +example: + +```toml +[package] +name = "..." +# ... + +# Metadata used when generating an Android APK, for example. +[package.metadata.android] +package-name = "my-awesome-android-app" +assets = "path/to/static" +``` + +#### The `default-run` field + +The `default-run` field in the `[package]` section of the manifest can be used +to specify a default binary picked by [`cargo run`]. For example, when there is +both `src/bin/a.rs` and `src/bin/b.rs`: + +```toml +[package] +default-run = "a" +``` + +### Dependency sections + +See the [specifying dependencies page](specifying-dependencies.md) for +information on the `[dependencies]`, `[dev-dependencies]`, +`[build-dependencies]`, and target-specific `[target.*.dependencies]` sections. + +### The `[profile.*]` sections + +Cargo supports custom configuration of how rustc is invoked through profiles at +the top level. Any manifest may declare a profile, but only the top level +package’s profiles are actually read. All dependencies’ profiles will be +overridden. This is done so the top-level package has control over how its +dependencies are compiled. + +There are four currently supported profile names, all of which have the same +configuration available to them. 
Listed below is the configuration available, +along with the defaults for each profile. + +```toml +# The development profile, used for `cargo build`. +[profile.dev] +opt-level = 0 # controls the `--opt-level` the compiler builds with. + # 0-1 is good for debugging. 2 is well-optimized. Max is 3. + # 's' attempts to reduce size, 'z' reduces size even more. +debug = true # (u32 or bool) Include debug information (debug symbols). + # Equivalent to `-C debuginfo=2` compiler flag. +rpath = false # controls whether compiler should set loader paths. + # If true, passes `-C rpath` flag to the compiler. +lto = false # Link Time Optimization usually reduces size of binaries + # and static libraries. Increases compilation time. + # If true, passes `-C lto` flag to the compiler, and if a + # string is specified like 'thin' then `-C lto=thin` will + # be passed. +debug-assertions = true # controls whether debug assertions are enabled + # (e.g., debug_assert!() and arithmetic overflow checks) +codegen-units = 16 # if > 1 enables parallel code generation which improves + # compile times, but prevents some optimizations. + # Passes `-C codegen-units`. +panic = 'unwind' # panic strategy (`-C panic=...`), can also be 'abort' +incremental = true # whether or not incremental compilation is enabled + # This can be overridden globally with the CARGO_INCREMENTAL + # environment variable or `build.incremental` config + # variable. Incremental is only used for path sources. +overflow-checks = true # use overflow checks for integer arithmetic. + # Passes the `-C overflow-checks=...` flag to the compiler. + +# The release profile, used for `cargo build --release` (and the dependencies +# for `cargo test --release`, including the local library or binary). +[profile.release] +opt-level = 3 +debug = false +rpath = false +lto = false +debug-assertions = false +codegen-units = 16 +panic = 'unwind' +incremental = false +overflow-checks = false + +# The testing profile, used for `cargo test` (for `cargo test --release` see +# the `release` and `bench` profiles). +[profile.test] +opt-level = 0 +debug = 2 +rpath = false +lto = false +debug-assertions = true +codegen-units = 16 +panic = 'unwind' +incremental = true +overflow-checks = true + +# The benchmarking profile, used for `cargo bench` (and the test targets and +# unit tests for `cargo test --release`). +[profile.bench] +opt-level = 3 +debug = false +rpath = false +lto = false +debug-assertions = false +codegen-units = 16 +panic = 'unwind' +incremental = false +overflow-checks = false +``` + +### The `[features]` section + +Cargo supports features to allow expression of: + +* conditional compilation options (usable through `cfg` attributes); +* optional dependencies, which enhance a package, but are not required; and +* clusters of optional dependencies, such as `postgres`, that would include the + `postgres` package, the `postgres-macros` package, and possibly other packages + (such as development-time mocking libraries, debugging tools, etc.). + +A feature of a package is either an optional dependency, or a set of other +features. The format for specifying features is: + +```toml +[package] +name = "awesome" + +[features] +# The default set of optional packages. Most people will want to use these +# packages, but they are strictly optional. Note that `session` is not a package +# but rather another feature listed in this manifest. 
+default = ["jquery", "uglifier", "session"]
+
+# A feature with no dependencies is used mainly for conditional compilation,
+# like `#[cfg(feature = "go-faster")]`.
+go-faster = []
+
+# The `secure-password` feature depends on the bcrypt package. This aliasing
+# will allow people to talk about the feature in a higher-level way and allow
+# this package to add more requirements to the feature in the future.
+secure-password = ["bcrypt"]
+
+# Features can be used to reexport features of other packages. The `session`
+# feature of package `awesome` will ensure that the `session` feature of the
+# package `cookie` is also enabled.
+session = ["cookie/session"]
+
+[dependencies]
+# These packages are mandatory and form the core of this package’s distribution.
+cookie = "1.2.0"
+oauth = "1.1.0"
+route-recognizer = "=2.1.0"
+
+# A list of all of the optional dependencies, some of which are included in the
+# above `features`. They can be opted into by apps.
+jquery = { version = "1.0.2", optional = true }
+uglifier = { version = "1.5.3", optional = true }
+bcrypt = { version = "*", optional = true }
+civet = { version = "*", optional = true }
+```
+
+To use the package `awesome`:
+
+```toml
+[dependencies.awesome]
+version = "1.3.5"
+default-features = false # do not include the default features, and optionally
+                         # cherry-pick individual features
+features = ["secure-password", "civet"]
+```
+
+#### Rules
+
+The usage of features is subject to a few rules:
+
+* Feature names must not conflict with other package names in the manifest. This
+  is because they are opted into via `features = [...]`, which only has a single
+  namespace.
+* With the exception of the `default` feature, all features are opt-in. To opt
+  out of the default feature, use `default-features = false` and cherry-pick
+  individual features.
+* Feature groups are not allowed to cyclically depend on one another.
+* Dev-dependencies cannot be optional.
+* Feature groups can only reference optional dependencies.
+* When a feature is selected, Cargo will call `rustc` with `--cfg
+  feature="${feature_name}"`. If a feature group is included, it and all of its
+  individual features will be included. This can be tested in code via
+  `#[cfg(feature = "foo")]`.
+
+Note that it is explicitly allowed for features to not actually activate any
+optional dependencies. This allows packages to internally enable/disable
+features without requiring a new dependency.
+
+#### Usage in end products
+
+One major use-case for this feature is specifying optional features in
+end-products. For example, the Servo package may want to include optional
+features that people can enable or disable when they build it.
+
+In that case, Servo will describe features in its `Cargo.toml` and they can be
+enabled using command-line flags:
+
+```console
+$ cargo build --release --features "shumway pdf"
+```
+
+Default features can be excluded using `--no-default-features`.
+
+#### Usage in packages
+
+In most cases, the concept of *optional dependency* in a library is best
+expressed as a separate package that the top-level application depends on.
+
+However, high-level packages, like Iron or Piston, may want the ability to
+curate a number of packages for easy installation. The current Cargo system
+allows them to curate a number of mandatory dependencies into a single package
+for easy installation.
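+
+As a sketch, such a curation package can be as simple as a manifest whose
+mandatory dependencies pull in the curated set (the crate names and versions
+below are hypothetical):
+
+```toml
+[package]
+name = "my-web-stack"
+version = "0.1.0"
+
+# Depending on `my-web-stack` pulls in the whole curated set; none of these
+# dependencies are optional, so no feature flags are involved.
+[dependencies]
+http-server = "1.0"
+templating = "2.3"
+orm = "0.9"
+```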
+ +In some cases, packages may want to provide additional curation for optional +dependencies: + +* grouping a number of low-level optional dependencies together into a single + high-level feature; +* specifying packages that are recommended (or suggested) to be included by + users of the package; and +* including a feature (like `secure-password` in the motivating example) that + will only work if an optional dependency is available, and would be difficult + to implement as a separate package (for example, it may be overly difficult to + design an IO package to be completely decoupled from OpenSSL, with opt-in via + the inclusion of a separate package). + +In almost all cases, it is an antipattern to use these features outside of +high-level packages that are designed for curation. If a feature is optional, it +can almost certainly be expressed as a separate package. + +### The `[workspace]` section + +Packages can define a workspace which is a set of crates that will all share the +same `Cargo.lock` and output directory. The `[workspace]` table can be defined +as: + +```toml +[workspace] + +# Optional key, inferred from path dependencies if not present. +# Additional non-path dependencies that should be included must be given here. +# In particular, for a virtual manifest, all members have to be listed. +members = ["path/to/member1", "path/to/member2", "path/to/member3/*"] + +# Optional key, empty if not present. +exclude = ["path1", "path/to/dir2"] +``` + +Workspaces were added to Cargo as part of [RFC 1525] and have a number of +properties: + +* A workspace can contain multiple crates where one of them is the *root crate*. +* The *root crate*'s `Cargo.toml` contains the `[workspace]` table, but is not + required to have other configuration. +* Whenever any crate in the workspace is compiled, output is placed in the + *workspace root* (i.e., next to the *root crate*'s `Cargo.toml`). +* The lock file for all crates in the workspace resides in the *workspace root*. +* The `[patch]`, `[replace]` and `[profile.*]` sections in `Cargo.toml` + are only recognized + in the *root crate*'s manifest, and ignored in member crates' manifests. + +[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md + +The *root crate* of a workspace, indicated by the presence of `[workspace]` in +its manifest, is responsible for defining the entire workspace. All `path` +dependencies residing in the workspace directory become members. You can add +additional packages to the workspace by listing them in the `members` key. Note +that members of the workspaces listed explicitly will also have their path +dependencies included in the workspace. Sometimes a package may have a lot of +workspace members and it can be onerous to keep up to date. The path dependency +can also use [globs][globs] to match multiple paths. Finally, the `exclude` +key can be used to blacklist paths from being included in a workspace. This can +be useful if some path dependencies aren't desired to be in the workspace at +all. + +The `package.workspace` manifest key (described above) is used in member crates +to point at a workspace's root crate. If this key is omitted then it is inferred +to be the first crate whose manifest contains `[workspace]` upwards in the +filesystem. + +A crate may either specify `package.workspace` or specify `[workspace]`. That +is, a crate cannot both be a root crate in a workspace (contain `[workspace]`) +and also be a member crate of another workspace (contain `package.workspace`). 
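+
+As a minimal sketch (all names and paths here are hypothetical), a root crate
+and a member that points back at it explicitly might look like this:
+
+```toml
+# Cargo.toml at the workspace root: the `[workspace]` table makes this
+# package the root crate.
+[package]
+name = "app"
+version = "0.1.0"
+
+[workspace]
+members = ["tools/helper"]
+```
+
+```toml
+# tools/helper/Cargo.toml: `package.workspace` points back at the root.
+# This is usually inferred automatically and only needs to be spelled out
+# when the root cannot be found by searching upwards in the filesystem.
+[package]
+name = "helper"
+version = "0.1.0"
+workspace = "../.."
+```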
+
+Most of the time you will not need to manage workspaces manually, as
+[`cargo new`] and [`cargo init`] will handle workspace configuration
+automatically.
+
+[globs]: https://docs.rs/glob/0.2.11/glob/struct.Pattern.html
+
+#### Virtual Manifest
+
+In workspace manifests, if the `package` table is present, the workspace root
+crate will be treated as a normal package, as well as a workspace. If the
+`package` table is not present in a workspace manifest, it is called a *virtual
+manifest*.
+
+#### Package selection
+
+In a workspace, package-related cargo commands like [`cargo build`] apply to
+packages selected by `-p` / `--package` or `--all` command-line parameters.
+When neither is specified, the optional `default-members` configuration is used:
+
+```toml
+[workspace]
+members = ["path/to/member1", "path/to/member2", "path/to/member3/*"]
+default-members = ["path/to/member2", "path/to/member3/foo"]
+```
+
+When specified, `default-members` must expand to a subset of `members`.
+
+When `default-members` is not specified, the default is the root manifest
+if it is a package, or every member manifest (as if `--all` were specified
+on the command-line) for virtual workspaces.
+
+### The project layout
+
+If your package is an executable, name the main source file `src/main.rs`. If it
+is a library, name the main source file `src/lib.rs`.
+
+Cargo will also treat any files located in `src/bin/*.rs` as executables. If your
+executable consists of more than just one source file, you might also use a directory
+inside `src/bin` containing a `main.rs` file which will be treated as an executable
+with the name of the parent directory.
+
+Your package can optionally contain folders named `examples`, `tests`, and
+`benches`, which Cargo will treat as containing examples,
+integration tests, and benchmarks respectively. Analogous to `bin` targets, they
+may be composed of single files or directories with a `main.rs` file.
+
+```
+▾ src/          # directory containing source files
+  lib.rs        # the main entry point for libraries and packages
+  main.rs       # the main entry point for packages producing executables
+  ▾ bin/        # (optional) directory containing additional executables
+    *.rs
+    ▾ */        # (optional) directories containing multi-file executables
+      main.rs
+▾ examples/     # (optional) examples
+  *.rs
+  ▾ */          # (optional) directories containing multi-file examples
+    main.rs
+▾ tests/        # (optional) integration tests
+  *.rs
+  ▾ */          # (optional) directories containing multi-file tests
+    main.rs
+▾ benches/      # (optional) benchmarks
+  *.rs
+  ▾ */          # (optional) directories containing multi-file benchmarks
+    main.rs
+```
+
+To structure your code after you've created the files and folders for your
+package, you should remember to use Rust's module system, which you can read
+about in [the
+book](../../book/ch07-00-managing-growing-projects-with-packages-crates-and-modules.html).
+
+See [Configuring a target](#configuring-a-target) below for more details on
+manually configuring target settings. See [Target
+auto-discovery](#target-auto-discovery) below for more information on
+controlling how Cargo automatically infers targets.
+
+### Examples
+
+Files located under `examples` are example uses of the functionality provided by
+the library. When compiled, they are placed in the `target/examples` directory.
+
+They can compile either as executables (with a `main()` function) or libraries
+and pull in the library by using `extern crate <library-name>`. They are
+compiled when you run your tests to protect them from bitrotting.
+
+You can run individual executable examples with the command `cargo run --example
+<example-name>`.
+
+Specify `crate-type` to compile an example as a library (additional
+information about crate types is available in
+[The Rust Reference](../../reference/linkage.html)):
+
+```toml
+[[example]]
+name = "foo"
+crate-type = ["staticlib"]
+```
+
+You can build individual library examples with the command `cargo build
+--example <example-name>`.
+
+### Tests
+
+When you run [`cargo test`], Cargo will:
+
+* compile and run your library’s unit tests, which are in the files reachable
+  from `lib.rs` (naturally, any sections marked with `#[cfg(test)]` will be
+  considered at this stage);
+* compile and run your library’s documentation tests, which are embedded inside
+  of documentation blocks;
+* compile and run your library’s [integration tests](#integration-tests); and
+* compile your library’s examples.
+
+#### Integration tests
+
+Each file in `tests/*.rs` is an integration test. When you run [`cargo test`],
+Cargo will compile each of these files as a separate crate. The crate can link
+to your library by using `extern crate <library-name>`, like any other code that
+depends on it.
+
+Cargo will not automatically compile files inside subdirectories of `tests`, but
+an integration test can import modules from these directories as usual. For
+example, if you want several integration tests to share some code, you can put
+the shared code in `tests/common/mod.rs` and then put `mod common;` in each of
+the test files.
+
+### Configuring a target
+
+All of the `[[bin]]`, `[lib]`, `[[bench]]`, `[[test]]`, and `[[example]]`
+sections support similar configuration for specifying how a target should be
+built. The double-bracket sections like `[[bin]]` are TOML
+[arrays of tables](https://github.com/toml-lang/toml#array-of-tables), which means you can
+write more than one `[[bin]]` section to make several executables in your crate.
+
+The example below uses `[lib]`, but it applies to all other sections
+as well. All values listed are the defaults for that option unless otherwise
+specified.
+
+```toml
+[package]
+# ...
+
+[lib]
+# The name of a target is the name of the library that will be generated. This
+# is defaulted to the name of the package, with any dashes replaced
+# with underscores. (Rust `extern crate` declarations reference this name;
+# therefore the value must be a valid Rust identifier to be usable.)
+name = "foo"
+
+# This field points at where the crate is located, relative to the `Cargo.toml`.
+path = "src/lib.rs"
+
+# A flag for enabling unit tests for this target. This is used by `cargo test`.
+test = true
+
+# A flag for enabling documentation tests for this target. This is only relevant
+# for libraries, it has no effect on other sections. This is used by
+# `cargo test`.
+doctest = true
+
+# A flag for enabling benchmarks for this target. This is used by `cargo bench`.
+bench = true
+
+# A flag for enabling documentation of this target. This is used by `cargo doc`.
+doc = true
+
+# If the target is meant to be a compiler plugin, this field must be set to true
+# for Cargo to correctly compile it and make it available for all dependencies.
+plugin = false
+
+# If the target is meant to be a "macros 1.1" procedural macro, this field must
+# be set to true.
+proc-macro = false
+
+# If set to false, `cargo test` will omit the `--test` flag to rustc, which
+# stops it from generating a test harness. This is useful when the binary being
+# built manages the test runner itself.
+harness = true + +# If set then a target can be configured to use a different edition than the +# `[package]` is configured to use, perhaps only compiling a library with the +# 2018 edition or only compiling one unit test with the 2015 edition. By default +# all targets are compiled with the edition specified in `[package]`. +edition = '2015' + +# Here's an example of a TOML "array of tables" section, in this case specifying +# a binary target name and path. +[[bin]] +name = "my-cool-binary" +path = "src/my-cool-binary.rs" +``` + +#### Target auto-discovery + +By default, Cargo automatically determines the targets to build based on the +[layout of the files](#the-project-layout) on the filesystem. The target +configuration tables, such as `[lib]`, `[[bin]]`, `[[test]]`, `[[bench]]`, or +`[[example]]`, can be used to add additional targets that don't follow the +standard directory layout. + +The automatic target discovery can be disabled so that only manually +configured targets will be built. Setting the keys `autobins`, `autoexamples`, +`autotests`, or `autobenches` to `false` in the `[package]` section will +disable auto-discovery of the corresponding target type. + +Disabling automatic discovery should only be needed for specialized +situations. For example, if you have a library where you want a *module* named +`bin`, this would present a problem because Cargo would usually attempt to +compile anything in the `bin` directory as an executable. Here is a sample +layout of this scenario: + +``` +├── Cargo.toml +└── src +    ├── lib.rs +    └── bin +       └── mod.rs +``` + +To prevent Cargo from inferring `src/bin/mod.rs` as an executable, set +`autobins = false` in `Cargo.toml` to disable auto-discovery: + +```toml +[package] +# … +autobins = false +``` + +> **Note**: For packages with the 2015 edition, the default for auto-discovery +> is `false` if at least one target is manually defined in `Cargo.toml`. +> Beginning with the 2018 edition, the default is always `true`. + +#### The `required-features` field (optional) + +The `required-features` field specifies which features the target needs in order +to be built. If any of the required features are not selected, the target will +be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`, `[[test]]`, +and `[[example]]` sections, it has no effect on `[lib]`. + +```toml +[features] +# ... +postgres = [] +sqlite = [] +tools = [] + +[[bin]] +# ... +required-features = ["postgres", "tools"] +``` + +#### Building dynamic or static libraries + +If your package produces a library, you can specify which kind of library to +build by explicitly listing the library in your `Cargo.toml`: + +```toml +# ... + +[lib] +name = "..." +crate-type = ["dylib"] # could be `staticlib` as well +``` + +The available options are `dylib`, `rlib`, `staticlib`, `cdylib`, and +`proc-macro`. + +You can read more about the different crate types in the +[Rust Reference Manual](../../reference/linkage.html) + +### The `[patch]` Section + +This section of Cargo.toml can be used to [override dependencies][replace] with +other copies. The syntax is similar to the `[dependencies]` section: + +```toml +[patch.crates-io] +foo = { git = 'https://github.com/example/foo' } +bar = { path = 'my/local/bar' } + +[dependencies.baz] +git = 'https://github.com/example/baz' + +[patch.'https://github.com/example/baz'] +baz = { git = 'https://github.com/example/patched-baz', branch = 'my-branch' } +``` + +The `[patch]` table is made of dependency-like sub-tables. 
Each key after +`[patch]` is a URL of the source that is being patched, or the name of a +registry. The name `crates-io` may be used to override the default registry +[crates.io]. The first `[patch]` in the example above demonstrates overriding +[crates.io], and the second `[patch]` demonstrates overriding a git source. + +Each entry in these tables is a normal dependency specification, the same as +found in the `[dependencies]` section of the manifest. The dependencies listed +in the `[patch]` section are resolved and used to patch the source at the +URL specified. The above manifest snippet patches the `crates-io` source (e.g. +crates.io itself) with the `foo` crate and `bar` crate. It also +patches the `https://github.com/example/baz` source with a `my-branch` that +comes from elsewhere. + +Sources can be patched with versions of crates that do not exist, and they can +also be patched with versions of crates that already exist. If a source is +patched with a crate version that already exists in the source, then the +source's original crate is replaced. + +More information about overriding dependencies can be found in the [overriding +dependencies][replace] section of the documentation and [RFC 1969] for the +technical specification of this feature. + +[RFC 1969]: https://github.com/rust-lang/rfcs/pull/1969 +[crates.io]: https://crates.io/ +[replace]: specifying-dependencies.md#overriding-dependencies + +### The `[replace]` Section + +This section of Cargo.toml can be used to [override dependencies][replace] with +other copies. The syntax is similar to the `[dependencies]` section: + +```toml +[replace] +"foo:0.1.0" = { git = 'https://github.com/example/foo' } +"bar:1.0.2" = { path = 'my/local/bar' } +``` + +Each key in the `[replace]` table is a [package ID +specification](pkgid-spec.md), which allows arbitrarily choosing a node in the +dependency graph to override. The value of each key is the same as the +`[dependencies]` syntax for specifying dependencies, except that you can't +specify features. Note that when a crate is overridden the copy it's overridden +with must have both the same name and version, but it can come from a different +source (e.g., git or a local path). + +More information about overriding dependencies can be found in the [overriding +dependencies][replace] section of the documentation. + +[`cargo build`]: ../commands/cargo-build.md +[`cargo init`]: ../commands/cargo-init.md +[`cargo new`]: ../commands/cargo-new.md +[`cargo run`]: ../commands/cargo-run.md +[`cargo test`]: ../commands/cargo-test.md +[spdx-2.1-license-expressions]: https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60 +[spdx-license-list-2.4]: https://github.com/spdx/license-list-data/tree/v2.4 +[spdx-license-list]: https://spdx.org/licenses/ diff --git a/src/doc/src/reference/pkgid-spec.md b/src/doc/src/reference/pkgid-spec.md new file mode 100644 index 00000000000..c921d51ba31 --- /dev/null +++ b/src/doc/src/reference/pkgid-spec.md @@ -0,0 +1,44 @@ +## Package ID Specifications + +### Package ID specifications + +Subcommands of Cargo frequently need to refer to a particular package within a +dependency graph for various operations like updating, cleaning, building, etc. +To solve this problem, Cargo supports Package ID Specifications. A specification +is a string which is used to uniquely refer to one package within a graph of +packages. 
+ +#### Specification grammar + +The formal grammar for a Package Id Specification is: + +```notrust +pkgid := pkgname + | [ proto "://" ] hostname-and-path [ "#" ( pkgname | semver ) ] +pkgname := name [ ":" semver ] + +proto := "http" | "git" | ... +``` + +Here, brackets indicate that the contents are optional. + +#### Example specifications + +These could all be references to a package `foo` version `1.2.3` from the +registry at `crates.io` + +| pkgid | name | version | url | +|:-----------------------------|:-----:|:-------:|:----------------------:| +| `foo` | `foo` | `*` | `*` | +| `foo:1.2.3` | `foo` | `1.2.3` | `*` | +| `crates.io/foo` | `foo` | `*` | `*://crates.io/foo` | +| `crates.io/foo#1.2.3` | `foo` | `1.2.3` | `*://crates.io/foo` | +| `crates.io/bar#foo:1.2.3` | `foo` | `1.2.3` | `*://crates.io/bar` | +| `https://crates.io/foo#1.2.3`| `foo` | `1.2.3` | `https://crates.io/foo` | + +#### Brevity of specifications + +The goal of this is to enable both succinct and exhaustive syntaxes for +referring to packages in a dependency graph. Ambiguous references may refer to +one or more packages. Most commands generate an error if more than one package +could be referred to with the same specification. diff --git a/src/doc/src/reference/publishing.md b/src/doc/src/reference/publishing.md new file mode 100644 index 00000000000..daba1a0beaa --- /dev/null +++ b/src/doc/src/reference/publishing.md @@ -0,0 +1,249 @@ +## Publishing on crates.io + +Once you've got a library that you'd like to share with the world, it's time to +publish it on [crates.io]! Publishing a crate is when a specific +version is uploaded to be hosted on [crates.io]. + +Take care when publishing a crate, because a publish is **permanent**. The +version can never be overwritten, and the code cannot be deleted. There is no +limit to the number of versions which can be published, however. + +### Before your first publish + +First thing’s first, you’ll need an account on [crates.io] to acquire +an API token. To do so, [visit the home page][crates.io] and log in via a GitHub +account (required for now). After this, visit your [Account +Settings](https://crates.io/me) page and run the [`cargo login`] command +specified. + +```console +$ cargo login abcdefghijklmnopqrstuvwxyz012345 +``` + +This command will inform Cargo of your API token and store it locally in your +`~/.cargo/credentials`. Note that this token is a **secret** and should not be +shared with anyone else. If it leaks for any reason, you should regenerate it +immediately. + +### Before publishing a new crate + +Keep in mind that crate names on [crates.io] are allocated on a first-come-first- +serve basis. Once a crate name is taken, it cannot be used for another crate. + +Check out the [metadata you can +specify](manifest.md#package-metadata) in `Cargo.toml` to ensure +your crate can be discovered more easily! Before publishing, make sure you have +filled out the following fields: + +- `authors` +- `license` or `license-file` +- `description` +- `homepage` +- `documentation` +- `repository` + +It would also be a good idea to include some `keywords` and `categories`, +though they are not required. + +If you are publishing a library, you may also want to consult the [Rust API +Guidelines]. + +#### Packaging a crate + +The next step is to package up your crate and upload it to [crates.io]. For +this we’ll use the [`cargo publish`] subcommand. This command performs the following +steps: + +1. Perform some verification checks on your package. +2. 
Compress your source code into a `.crate` file. +3. Extract the `.crate` file into a temporary directory and verify that it + compiles. +4. Upload the `.crate` file to [crates.io]. +5. The registry will perform some additional checks on the uploaded package + before adding it. + +It is recommended that you first run `cargo publish --dry-run` (or [`cargo +package`] which is equivalent) to ensure there aren't any warnings or errors +before publishing. This will perform the first three steps listed above. + +```console +$ cargo publish --dry-run +``` + +You can inspect the generated `.crate` file in the `target/package` directory. +[crates.io] currently has a 10MB size limit on the `.crate` file. You may want +to check the size of the `.crate` file to ensure you didn't accidentally +package up large assets that are not required to build your package, such as +test data, website documentation, or code generation. You can check which +files are included with the following command: + +```console +$ cargo package --list +``` + +Cargo will automatically ignore files ignored by your version control system +when packaging, but if you want to specify an extra set of files to ignore you +can use the [`exclude` +key](manifest.md#the-exclude-and-include-fields-optional) in the +manifest: + +```toml +[package] +# ... +exclude = [ + "public/assets/*", + "videos/*", +] +``` + +If you’d rather explicitly list the files to include, Cargo also supports an +`include` key, which if set, overrides the `exclude` key: + +```toml +[package] +# ... +include = [ + "**/*.rs", + "Cargo.toml", +] +``` + +### Uploading the crate + +When you are ready to publish, use the [`cargo publish`] command +to upload to [crates.io]: + +```console +$ cargo publish +``` + +And that’s it, you’ve now published your first crate! + +### Publishing a new version of an existing crate + +In order to release a new version, change the `version` value specified in +your `Cargo.toml` manifest. Keep in mind [the semver +rules](manifest.md#the-version-field), and consult [RFC 1105] for +what constitutes a semver-breaking change. Then run [`cargo publish`] as +described above to upload the new version. + +### Managing a crates.io-based crate + +Management of crates is primarily done through the command line `cargo` tool +rather than the [crates.io] web interface. For this, there are a few subcommands +to manage a crate. + +#### `cargo yank` + +Occasions may arise where you publish a version of a crate that actually ends up +being broken for one reason or another (syntax error, forgot to include a file, +etc.). For situations such as this, Cargo supports a “yank” of a version of a +crate. + +```console +$ cargo yank --vers 1.0.1 +$ cargo yank --vers 1.0.1 --undo +``` + +A yank **does not** delete any code. This feature is not intended for deleting +accidentally uploaded secrets, for example. If that happens, you must reset +those secrets immediately. + +The semantics of a yanked version are that no new dependencies can be created +against that version, but all existing dependencies continue to work. One of the +major goals of [crates.io] is to act as a permanent archive of crates that does +not change over time, and allowing deletion of a version would go against this +goal. Essentially a yank means that all packages with a `Cargo.lock` will not +break, while any future `Cargo.lock` files generated will not list the yanked +version. 
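+
+For illustration, here is a hypothetical `Cargo.lock` fragment pinning such a
+version. Because the exact version is already recorded, projects carrying this
+lock file keep building; only freshly generated lock files will refuse to
+select the yanked version:
+
+```toml
+# Hypothetical entry in an existing Cargo.lock. Even if foo 1.0.1 is yanked,
+# this project continues to resolve and build against it.
+[[package]]
+name = "foo"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+```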
+
+#### `cargo owner`
+
+A crate is often developed by more than one person, or the primary maintainer
+may change over time! The owner of a crate is the only person allowed to publish
+new versions of the crate, but an owner may designate additional owners.
+
+```console
+$ cargo owner --add my-buddy
+$ cargo owner --remove my-buddy
+$ cargo owner --add github:rust-lang:owners
+$ cargo owner --remove github:rust-lang:owners
+```
+
+The owner IDs given to these commands must be GitHub user names or GitHub teams.
+
+If a user name is given to `--add`, that user is invited as a “named” owner, with
+full rights to the crate. In addition to being able to publish or yank versions
+of the crate, they have the ability to add or remove owners, *including* the
+owner that made *them* an owner. Needless to say, you shouldn’t make people you
+don’t fully trust into a named owner. In order to become a named owner, a user
+must have logged into [crates.io] previously.
+
+If a team name is given to `--add`, that team is invited as a “team” owner, with
+restricted rights to the crate. While they have permission to publish or yank
+versions of the crate, they *do not* have the ability to add or remove owners.
+In addition to being more convenient for managing groups of owners, teams are
+just a bit more secure against owners becoming malicious.
+
+The syntax for teams is currently `github:org:team` (see examples above).
+In order to invite a team as an owner one must be a member of that team. No
+such restriction applies to removing a team as an owner.
+
+### GitHub permissions
+
+Team membership is not something GitHub provides simple public access to, and
+you are likely to encounter the following message when working with teams:
+
+> It looks like you don’t have permission to query a necessary property from
+GitHub to complete this request. You may need to re-authenticate on [crates.io]
+to grant permission to read GitHub org memberships. Just go to
+<https://crates.io/login>.
+
+This is basically a catch-all for “you tried to query a team, and one of the
+five levels of membership access control denied this”. That is not an
+exaggeration. GitHub’s support for team access control is Enterprise Grade.
+
+The most likely cause of this is simply that you last logged in before this
+feature was added. We originally requested *no* permissions from GitHub when
+authenticating users, because we didn’t actually ever use the user’s token for
+anything other than logging them in. However to query team membership on your
+behalf, we now require [the `read:org` scope][oauth-scopes].
+
+You are free to deny us this scope, and everything that worked before teams
+were introduced will keep working. However you will never be able to add a team
+as an owner, or publish a crate as a team owner. If you ever attempt to do this,
+you will get the error above. You may also see this error if you ever try to
+publish a crate that you don’t own at all, but otherwise happens to have a team.
+
+If you ever change your mind, or just aren’t sure if [crates.io] has sufficient
+permission, you can always go to <https://crates.io/login>, which will prompt you
+for permission if [crates.io] doesn’t have all the scopes it would like to.
+
+An additional barrier to querying GitHub is that the organization may be
+actively denying third party access. To check this, you can go to:
+
+    https://github.com/organizations/:org/settings/oauth_application_policy
+
+where `:org` is the name of the organization (e.g., `rust-lang`).
You may see +something like: + +![Organization Access Control](../images/org-level-acl.png) + +Where you may choose to explicitly remove [crates.io] from your organization’s +blacklist, or simply press the “Remove Restrictions” button to allow all third +party applications to access this data. + +Alternatively, when [crates.io] requested the `read:org` scope, you could have +explicitly whitelisted [crates.io] querying the org in question by pressing +the “Grant Access” button next to its name: + +![Authentication Access Control](../images/auth-level-acl.png) + +[RFC 1105]: https://github.com/rust-lang/rfcs/blob/master/text/1105-api-evolution.md +[Rust API Guidelines]: https://rust-lang-nursery.github.io/api-guidelines/ +[`cargo login`]: ../commands/cargo-login.md +[`cargo package`]: ../commands/cargo-package.md +[`cargo publish`]: ../commands/cargo-publish.md +[crates.io]: https://crates.io/ +[oauth-scopes]: https://developer.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/ + diff --git a/src/doc/src/reference/registries.md b/src/doc/src/reference/registries.md new file mode 100644 index 00000000000..868bb45a209 --- /dev/null +++ b/src/doc/src/reference/registries.md @@ -0,0 +1,590 @@ +## Registries + +Cargo installs crates and fetches dependencies from a "registry". The default +registry is [crates.io]. A registry contains an "index" which contains a +searchable list of available crates. A registry may also provide a web API to +support publishing new crates directly from Cargo. + +> Note: If you are interested in mirroring or vendoring an existing registry, +> take a look at [Source Replacement]. + +### Using an Alternate Registry + +To use a registry other than [crates.io], the name and index URL of the +registry must be added to a [`.cargo/config` file][config]. The `registries` +table has a key for each registry, for example: + +```toml +[registries] +my-registry = { index = "https://my-intranet:8080/git/index" } +``` + +The `index` key should be a URL to a git repository with the registry's index. +A crate can then depend on a crate from another registry by specifying the +`registry` key and a value of the registry's name in that dependency's entry +in `Cargo.toml`: + +```toml +# Sample Cargo.toml +[package] +name = "my-project" +version = "0.1.0" + +[dependencies] +other-crate = { version = "1.0", registry = "my-registry" } +``` + +As with most config values, the index may be specified with an environment +variable instead of a config file. For example, setting the following +environment variable will accomplish the same thing as defining a config file: + +``` +CARGO_REGISTRIES_MY_REGISTRY_INDEX=https://my-intranet:8080/git/index +``` + +> Note: [crates.io] does not accept packages that depend on crates from other +> registries. + +### Publishing to an Alternate Registry + +If the registry supports web API access, then packages can be published +directly to the registry from Cargo. Several of Cargo's commands such as +[`cargo publish`] take a `--registry` command-line flag to indicate which +registry to use. For example, to publish the package in the current directory: + +1. `cargo login --registry=my-registry` + + This only needs to be done once. You must enter the secret API token + retrieved from the registry's website. Alternatively the token may be + passed directly to the `publish` command with the `--token` command-line + flag or an environment variable with the name of the registry such as + `CARGO_REGISTRIES_MY_REGISTRY_TOKEN`. + +2. 
`cargo publish --registry=my-registry` + +Instead of always passing the `--registry` command-line option, the default +registry may be set in [`.cargo/config`][config] with the `registry.default` +key. + +Setting the `package.publish` key in the `Cargo.toml` manifest restricts which +registries the package is allowed to be published to. This is useful to +prevent accidentally publishing a closed-source package to [crates.io]. The +value may be a list of registry names, for example: + +```toml +[package] +# ... +publish = ["my-registry"] +``` + +The `publish` value may also be `false` to restrict all publishing, which is +the same as an empty list. + +The authentication information saved by [`cargo login`] is stored in the +`credentials` file in the Cargo home directory (default `$HOME/.cargo`). It +has a separate table for each registry, for example: + +```toml +[registries.my-registry] +token = "854DvwSlUwEHtIo3kWy6x7UCPKHfzCmy" +``` + +### Running a Registry + +A minimal registry can be implemented by having a git repository that contains +an index, and a server that contains the compressed `.crate` files created by +[`cargo package`]. Users won't be able to use Cargo to publish to it, but this +may be sufficient for closed environments. + +A full-featured registry that supports publishing will additionally need to +have a web API service that conforms to the API used by Cargo. The web API is +documented below. + +At this time, there is no widely used software for running a custom registry. +There is interest in documenting projects that implement registry support, or +existing package caches that add support for Cargo. + +### Index Format + +The following defines the format of the index. New features are occasionally +added, which are only understood starting with the version of Cargo that +introduced them. Older versions of Cargo may not be able to use packages that +make use of new features. However, the format for older packages should not +change, so older versions of Cargo should be able to use them. + +The index is stored in a git repository so that Cargo can efficiently fetch +incremental updates to the index. In the root of the repository is a file +named `config.json` which contains JSON information used by Cargo for +accessing the registry. This is an example of what the [crates.io] config file +looks like: + +```javascript +{ + "dl": "https://crates.io/api/v1/crates", + "api": "https://crates.io" +} +``` + +The keys are: +- `dl`: This is the URL for downloading crates listed in the index. The value + may have the markers `{crate}` and `{version}` which are replaced with the + name and version of the crate to download. If the markers are not present, + then the value `/{crate}/{version}/download` is appended to the end. +- `api`: This is the base URL for the web API. This key is optional, but if it + is not specified, commands such as [`cargo publish`] will not work. The web + API is described below. + +The download endpoint should send the `.crate` file for the requested package. +Cargo supports https, http, and file URLs, HTTP redirects, HTTP1 and HTTP2. +The exact specifics of TLS support depend on the platform that Cargo is +running on, the version of Cargo, and how it was compiled. + +The rest of the index repository contains one file for each package, where the +filename is the name of the package in lowercase. Each version of the package +has a separate line in the file. 
The files are organized in a tier of +directories: + +- Packages with 1 character names are placed in a directory named `1`. +- Packages with 2 character names are placed in a directory named `2`. +- Packages with 3 character names are placed in the directory + `3/{first-character}` where `{first-character}` is the first character of + the package name. +- All other packages are stored in directories named + `{first-two}/{second-two}` where the top directory is the first two + characters of the package name, and the next subdirectory is the third and + fourth characters of the package name. For example, `cargo` would be stored + in a file named `ca/rg/cargo`. + +> Note: Although the index filenames are in lowercase, the fields that contain +> package names in `Cargo.toml` and the index JSON data are case-sensitive and +> may contain upper and lower case characters. + +Registries may want to consider enforcing limitations on package names added +to their index. Cargo itself allows names with any [alphanumeric], `-`, or `_` +character. For example, [crates.io] imposes relatively strict limitations, +such as requiring it to be a valid Rust identifier, only allowing ASCII +characters, under a specific length, and rejects reserved names such as +Windows special filenames like "nul". + +Each line in a package file contains a JSON object that describes a published +version of the package. The following is a pretty-printed example with comments +explaining the format of the entry. + +```javascript +{ + // The name of the package. + // This must only contain alphanumeric, `-`, or `_` characters. + "name": "foo", + // The version of the package this row is describing. + // This must be a valid version number according to the Semantic + // Versioning 2.0.0 spec at https://semver.org/. + "vers": "0.1.0", + // Array of direct dependencies of the package. + "deps": [ + { + // Name of the dependency. + // If the dependency is renamed from the original package name, + // this is the new name. The original package name is stored in + // the `package` field. + "name": "rand", + // The semver requirement for this dependency. + // This must be a valid version requirement defined at + // https://github.com/steveklabnik/semver#requirements. + "req": "^0.6", + // Array of features (as strings) enabled for this dependency. + "features": ["i128_support"], + // Boolean of whether or not this is an optional dependency. + "optional": false, + // Boolean of whether or not default features are enabled. + "default_features": true, + // The target platform for the dependency. + // null if not a target dependency. + // Otherwise, a string such as "cfg(windows)". + "target": null, + // The dependency kind. + // "dev", "build", or "normal". + // Note: this is a required field, but a small number of entries + // exist in the crates.io index with either a missing or null + // `kind` field due to implementation bugs. + "kind": "normal", + // The URL of the index of the registry where this dependency is + // from as a string. If not specified or null, it is assumed the + // dependency is in the current registry. + "registry": null, + // If the dependency is renamed, this is a string of the actual + // package name. If not specified or null, this dependency is not + // renamed. + "package": null, + } + ], + // A SHA256 checksum of the `.crate` file. + "cksum": "d867001db0e2b6e0496f9fac96930e2d42233ecd3ca0413e0753d4c7695d289c", + // Set of features defined for the package. 
+ // Each feature maps to an array of features or dependencies it enables. + "features": { + "extras": ["rand/simd_support"] + }, + // Boolean of whether or not this version has been yanked. + "yanked": false, + // The `links` string value from the package's manifest, or null if not + // specified. This field is optional and defaults to null. + "links": null +} +``` + +The JSON objects should not be modified after they are added except for the +`yanked` field whose value may change at any time. + +### Web API + +A registry may host a web API at the location defined in `config.json` to +support any of the actions listed below. + +Cargo includes the `Authorization` header for requests that require +authentication. The header value is the API token. The server should respond +with a 403 response code if the token is not valid. Users are expected to +visit the registry's website to obtain a token, and Cargo can store the token +using the [`cargo login`] command, or by passing the token on the +command-line. + +Responses use a 200 response code for both success and errors. Cargo looks at +the JSON response to determine if there was success or failure. Failure +responses have a JSON object with the following structure: + +```javascript +{ + // Array of errors to display to the user. + "errors": [ + { + // The error message as a string. + "detail": "error message text" + } + ] +} +``` + +Servers may also respond with a 404 response code to indicate the requested +resource is not found (for example, an unknown crate name). However, using a +200 response with an `errors` object allows a registry to provide a more +detailed error message if desired. + +For backwards compatibility, servers should ignore any unexpected query +parameters or JSON fields. If a JSON field is missing, it should be assumed to +be null. The endpoints are versioned with the `v1` component of the path, and +Cargo is responsible for handling backwards compatibility fallbacks should any +be required in the future. + +Cargo sets the following headers for all requests: + +- `Content-Type`: `application/json` +- `Accept`: `application/json` +- `User-Agent`: The Cargo version such as `cargo 1.32.0 (8610973aa + 2019-01-02)`. This may be modified by the user in a configuration value. + Added in 1.29. + +#### Publish + +- Endpoint: `/api/v1/crates/new` +- Method: PUT +- Authorization: Included + +The publish endpoint is used to publish a new version of a crate. The server +should validate the crate, make it available for download, and add it to the +index. + +The body of the data sent by Cargo is: + +- 32-bit unsigned little-endian integer of the length of JSON data. +- Metadata of the package as a JSON object. +- 32-bit unsigned little-endian integer of the length of the `.crate` file. +- The `.crate` file. + +The following is a commented example of the JSON object. Some notes of some +restrictions imposed by [crates.io] are included only to illustrate some +suggestions on types of validation that may be done, and should not be +considered as an exhaustive list of restrictions [crates.io] imposes. + +```javascript +{ + // The name of the package. + "name": "foo", + // The version of the package being published. + "vers": "0.1.0", + // Array of direct dependencies of the package. + "deps": [ + { + // Name of the dependency. + // If the dependency is renamed from the original package name, + // this is the original name. The new package name is stored in + // the `explicit_name_in_toml` field. 
+            "name": "rand",
+            // The semver requirement for this dependency.
+            "version_req": "^0.6",
+            // Array of features (as strings) enabled for this dependency.
+            "features": ["i128_support"],
+            // Boolean of whether or not this is an optional dependency.
+            "optional": false,
+            // Boolean of whether or not default features are enabled.
+            "default_features": true,
+            // The target platform for the dependency.
+            // null if not a target dependency.
+            // Otherwise, a string such as "cfg(windows)".
+            "target": null,
+            // The dependency kind.
+            // "dev", "build", or "normal".
+            "kind": "normal",
+            // The URL of the index of the registry where this dependency is
+            // from as a string. If not specified or null, it is assumed the
+            // dependency is in the current registry.
+            "registry": null,
+            // If the dependency is renamed, this is a string of the new
+            // package name. If not specified or null, this dependency is not
+            // renamed.
+            "explicit_name_in_toml": null,
+        }
+    ],
+    // Set of features defined for the package.
+    // Each feature maps to an array of features or dependencies it enables.
+    // Cargo does not impose limitations on feature names, but crates.io
+    // requires alphanumeric ASCII, `_` or `-` characters.
+    "features": {
+        "extras": ["rand/simd_support"]
+    },
+    // List of strings of the authors.
+    // May be empty. crates.io requires at least one entry.
+    "authors": ["Alice <a@example.com>"],
+    // Description field from the manifest.
+    // May be null. crates.io requires at least some content.
+    "description": null,
+    // String of the URL to the website for this package's documentation.
+    // May be null.
+    "documentation": null,
+    // String of the URL to the website for this package's home page.
+    // May be null.
+    "homepage": null,
+    // String of the content of the README file.
+    // May be null.
+    "readme": null,
+    // String of a relative path to a README file in the crate.
+    // May be null.
+    "readme_file": null,
+    // Array of strings of keywords for the package.
+    "keywords": [],
+    // Array of strings of categories for the package.
+    "categories": [],
+    // String of the license for the package.
+    // May be null. crates.io requires either `license` or `license_file` to be set.
+    "license": null,
+    // String of a relative path to a license file in the crate.
+    // May be null.
+    "license_file": null,
+    // String of the URL to the website for the source repository of this package.
+    // May be null.
+    "repository": null,
+    // Optional object of "status" badges. Each value is an object of
+    // arbitrary string to string mappings.
+    // crates.io has special interpretation of the format of the badges.
+    "badges": {
+        "travis-ci": {
+            "branch": "master",
+            "repository": "rust-lang/cargo"
+        }
+    },
+    // The `links` string value from the package's manifest, or null if not
+    // specified. This field is optional and defaults to null.
+    "links": null,
+}
+```
+
+A successful response includes the JSON object:
+
+```javascript
+{
+    // Optional object of warnings to display to the user.
+    "warnings": {
+        // Array of strings of categories that are invalid and ignored.
+        "invalid_categories": [],
+        // Array of strings of badge names that are invalid and ignored.
+        "invalid_badges": [],
+        // Array of strings of arbitrary warnings to display to the user.
+        "other": []
+    }
+}
+```
+
+#### Yank
+
+- Endpoint: `/api/v1/crates/{crate_name}/{version}/yank`
+- Method: DELETE
+- Authorization: Included
+
+The yank endpoint will set the `yank` field of the given version of a crate to
+`true` in the index.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+    // Indicates the yank succeeded, always true.
+    "ok": true,
+}
+```
+
+#### Unyank
+
+- Endpoint: `/api/v1/crates/{crate_name}/{version}/unyank`
+- Method: PUT
+- Authorization: Included
+
+The unyank endpoint will set the `yanked` field of the given version of a
+crate to `false` in the index.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+    // Indicates the unyank succeeded, always true.
+    "ok": true,
+}
+```
+
+#### Owners
+
+Cargo does not have an inherent notion of users and owners, but it does
+provide the `owner` command to assist in managing who has authorization to
+control a crate. It is up to the registry to decide exactly how users and
+owners are handled. See the [publishing documentation] for a description of
+how [crates.io] handles owners via GitHub users and teams.
+
+##### Owners: List
+
+- Endpoint: `/api/v1/crates/{crate_name}/owners`
+- Method: GET
+- Authorization: Included
+
+The owners endpoint returns a list of owners of the crate.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+    // Array of owners of the crate.
+    "users": [
+        {
+            // Unique unsigned 32-bit integer of the owner.
+            "id": 70,
+            // The unique username of the owner.
+            "login": "github:rust-lang:core",
+            // Name of the owner.
+            // This is optional and may be null.
+            "name": "Core",
+        }
+    ]
+}
+```
+
+##### Owners: Add
+
+- Endpoint: `/api/v1/crates/{crate_name}/owners`
+- Method: PUT
+- Authorization: Included
+
+A PUT request will send a request to the registry to add a new owner to a
+crate. It is up to the registry how to handle the request. For example,
+[crates.io] sends an invite to the user that they must accept before being
+added.
+
+The request should include the following JSON object:
+
+```javascript
+{
+    // Array of `login` strings of owners to add.
+    "users": ["login_name"]
+}
+```
+
+A successful response includes the JSON object:
+
+```javascript
+{
+    // Indicates the add succeeded, always true.
+    "ok": true,
+    // A string to be displayed to the user.
+    "msg": "user ehuss has been invited to be an owner of crate cargo"
+}
+```
+
+##### Owners: Remove
+
+- Endpoint: `/api/v1/crates/{crate_name}/owners`
+- Method: DELETE
+- Authorization: Included
+
+A DELETE request will remove an owner from a crate. The request should include
+the following JSON object:
+
+```javascript
+{
+    // Array of `login` strings of owners to remove.
+    "users": ["login_name"]
+}
+```
+
+A successful response includes the JSON object:
+
+```javascript
+{
+    // Indicates the remove succeeded, always true.
+    "ok": true
+}
+```
+
+#### Search
+
+- Endpoint: `/api/v1/crates`
+- Method: GET
+- Query Parameters:
+    - `q`: The search query string.
+    - `per_page`: Number of results, default 10, max 100.
+
+The search request will perform a search for crates, using criteria defined on
+the server.
+
+A successful response includes the JSON object:
+
+```javascript
+{
+    // Array of results.
+    "crates": [
+        {
+            // Name of the crate.
+            "name": "rand",
+            // The highest version available.
+            "max_version": "0.6.1",
+            // Textual description of the crate.
+            "description": "Random number generators and other randomness functionality.\n",
+        }
+    ],
+    "meta": {
+        // Total number of results available on the server.
+        "total": 119
+    }
+}
+```
+
+#### Login
+
+- Endpoint: `/me`
+
+The "login" endpoint is not an actual API request. It exists solely for the
+[`cargo login`] command to display a URL to instruct a user to visit in a web
+browser to log in and retrieve an API token.
+
+[Source Replacement]: source-replacement.md
+[`cargo login`]: ../commands/cargo-login.md
+[`cargo package`]: ../commands/cargo-package.md
+[`cargo publish`]: ../commands/cargo-publish.md
+[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric
+[config]: config.md
+[crates.io]: https://crates.io/
+[publishing documentation]: publishing.md#cargo-owner
diff --git a/src/doc/src/reference/source-replacement.md b/src/doc/src/reference/source-replacement.md
new file mode 100644
index 00000000000..49f048ecba1
--- /dev/null
+++ b/src/doc/src/reference/source-replacement.md
@@ -0,0 +1,122 @@
+## Source Replacement
+
+This document is about replacing the crate index. You can read about overriding
+dependencies in the [overriding dependencies][overriding] section of this
+documentation.
+
+A *source* is a provider that contains crates that may be included as
+dependencies for a package. Cargo supports the ability to **replace one source
+with another** to express strategies such as:
+
+* Vendoring - custom sources can be defined which represent crates on the local
+  filesystem. These sources are subsets of the source that they're replacing and
+  can be checked into packages if necessary.
+
+* Mirroring - sources can be replaced with an equivalent version which acts as a
+  cache for crates.io itself.
+
+Cargo's core assumption about source replacement is that the source code is
+exactly the same in both sources. Note that this also means that a replacement
+source is not allowed to have crates which are not present in the original
+source.
+
+As a consequence, source replacement is not appropriate for situations such as
+patching a dependency or using a private registry. Cargo supports patching
+dependencies through the usage of [the `[replace]` key][replace-section], and
+private registry support is described in [Registries][registries].
+
+[replace-section]: manifest.md#the-replace-section
+[overriding]: specifying-dependencies.md#overriding-dependencies
+[registries]: registries.md
+
+### Configuration
+
+Configuration of replacement sources is done through [`.cargo/config`][config],
+and the full set of available keys is:
+
+```toml
+# The `source` table is where all keys related to source-replacement
+# are stored.
+[source]
+
+# Under the `source` table are a number of other tables whose keys are a
+# name for the relevant source. For example this section defines a new
+# source, called `my-vendor-source`, which comes from a directory
+# located at `vendor` relative to the directory containing this `.cargo/config`
+# file
+[source.my-vendor-source]
+directory = "vendor"
+
+# The crates.io default source for crates is available under the name
+# "crates-io", and here we use the `replace-with` key to indicate that it's
+# replaced with our source above.
+[source.crates-io]
+replace-with = "my-vendor-source"
+
+# Each source has its own table where the key is the name of the source
+[source.the-source-name]
+
+# Indicate that `the-source-name` will be replaced with `another-source`,
+# defined elsewhere
+replace-with = "another-source"
+
+# Several kinds of sources can be specified (described in more detail below):
+registry = "https://example.com/path/to/index"
+local-registry = "path/to/registry"
+directory = "path/to/vendor"
+
+# Git sources can optionally specify a branch/tag/rev as well
+git = "https://example.com/path/to/repo"
+# branch = "master"
+# tag = "v1.0.1"
+# rev = "313f44e8"
+```
+
+[config]: config.md
+
+### Registry Sources
+
+A "registry source" is one that is the same as crates.io itself. That is, it has
+an index served in a git repository which matches the format of the
+[crates.io index](https://github.com/rust-lang/crates.io-index). That repository
+then has configuration indicating where to download crates from.
+
+Currently there is not an already-available project for setting up a mirror of
+crates.io. Stay tuned though!
+
+### Local Registry Sources
+
+A "local registry source" is intended to be a subset of another registry
+source, but available on the local filesystem (aka vendoring). Local registries
+are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are
+made up of a set of `*.crate` files and an index like the normal registry is.
+
+The primary way to manage and create local registry sources is through the
+[`cargo-local-registry`][cargo-local-registry] subcommand, which is
+[available on crates.io][cargo-local-registry] and can be installed with
+`cargo install cargo-local-registry`.
+
+[cargo-local-registry]: https://crates.io/crates/cargo-local-registry
+
+Local registries are contained within one directory and contain a number of
+`*.crate` files downloaded from crates.io as well as an `index` directory with
+the same format as the crates.io-index project (populated with just entries for
+the crates that are present).
+
+### Directory Sources
+
+A "directory source" is similar to a local registry source in that it contains
+a number of crates available on the local filesystem, suitable for vendoring
+dependencies. Directory sources are primarily managed by the `cargo vendor`
+subcommand.
+
+Directory sources are distinct from local registries, though, in that they
+contain the unpacked version of `*.crate` files, making them more suitable in
+some situations for checking everything into source control. A directory source
+is just a directory containing a number of other directories which contain the
+source code for crates (the unpacked version of `*.crate` files). Currently no
+restriction is placed on the name of each directory.
+
+Each crate in a directory source also has an associated metadata file indicating
+the checksum of each file in the crate to protect against accidental
+modifications.
diff --git a/src/doc/src/reference/specifying-dependencies.md b/src/doc/src/reference/specifying-dependencies.md
new file mode 100644
index 00000000000..5e99f3d6626
--- /dev/null
+++ b/src/doc/src/reference/specifying-dependencies.md
@@ -0,0 +1,622 @@
+## Specifying Dependencies
+
+Your crates can depend on other libraries from [crates.io] or other
+registries, `git` repositories, or subdirectories on your local file system.
+You can also temporarily override the location of a dependency — for example,
+to be able to test out a bug fix in the dependency that you are working on
+locally.
You can have different dependencies for different platforms, and +dependencies that are only used during development. Let's take a look at how +to do each of these. + +### Specifying dependencies from crates.io + +Cargo is configured to look for dependencies on [crates.io] by default. Only +the name and a version string are required in this case. In [the cargo +guide](../guide/index.md), we specified a dependency on the `time` crate: + +```toml +[dependencies] +time = "0.1.12" +``` + +The string `"0.1.12"` is a [semver] version requirement. Since this +string does not have any operators in it, it is interpreted the same way as +if we had specified `"^0.1.12"`, which is called a caret requirement. + +[semver]: https://github.com/steveklabnik/semver#requirements + +### Caret requirements + +**Caret requirements** allow SemVer compatible updates to a specified version. +An update is allowed if the new version number does not modify the left-most +non-zero digit in the major, minor, patch grouping. In this case, if we ran +`cargo update -p time`, cargo should update us to version `0.1.13` if it is the +latest `0.1.z` release, but would not update us to `0.2.0`. If instead we had +specified the version string as `^1.0`, cargo should update to `1.1` if it is +the latest `1.y` release, but not `2.0`. The version `0.0.x` is not considered +compatible with any other version. + +Here are some more examples of caret requirements and the versions that would +be allowed with them: + +```notrust +^1.2.3 := >=1.2.3 <2.0.0 +^1.2 := >=1.2.0 <2.0.0 +^1 := >=1.0.0 <2.0.0 +^0.2.3 := >=0.2.3 <0.3.0 +^0.2 := >=0.2.0 <0.3.0 +^0.0.3 := >=0.0.3 <0.0.4 +^0.0 := >=0.0.0 <0.1.0 +^0 := >=0.0.0 <1.0.0 +``` + +This compatibility convention is different from SemVer in the way it treats +versions before 1.0.0. While SemVer says there is no compatibility before +1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y ≥ z` +and `x > 0`. + +### Tilde requirements + +**Tilde requirements** specify a minimal version with some ability to update. +If you specify a major, minor, and patch version or only a major and minor +version, only patch-level changes are allowed. If you only specify a major +version, then minor- and patch-level changes are allowed. + +`~1.2.3` is an example of a tilde requirement. + +```notrust +~1.2.3 := >=1.2.3 <1.3.0 +~1.2 := >=1.2.0 <1.3.0 +~1 := >=1.0.0 <2.0.0 +``` + +### Wildcard requirements + +**Wildcard requirements** allow for any version where the wildcard is +positioned. + +`*`, `1.*` and `1.2.*` are examples of wildcard requirements. + +```notrust +* := >=0.0.0 +1.* := >=1.0.0 <2.0.0 +1.2.* := >=1.2.0 <1.3.0 +``` + +### Comparison requirements + +**Comparison requirements** allow manually specifying a version range or an +exact version to depend on. + +Here are some examples of comparison requirements: + +```notrust +>= 1.2.0 +> 1 +< 2 += 1.2.3 +``` + +### Multiple requirements + +Multiple version requirements can also be separated with a comma, e.g., `>= 1.2, +< 1.5`. + +### Specifying dependencies from other registries + +To specify a dependency from a registry other than [crates.io], first the +registry must be configured in a `.cargo/config` file. See the [registries +documentation] for more information. In the dependency, set the `registry` key +to the name of the registry to use. 
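+
+As a sketch, such a registry might be declared in `.cargo/config` like this
+(the name `my-registry` and the index URL here are placeholders):
+
+```toml
+[registries]
+my-registry = { index = "https://example.com/git/index" }
+```
+
+The dependency then refers to that registry by name: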
+
+```toml
+[dependencies]
+some-crate = { version = "1.0", registry = "my-registry" }
+```
+
+[registries documentation]: registries.md
+
+### Specifying dependencies from `git` repositories
+
+To depend on a library located in a `git` repository, the minimum information
+you need to specify is the location of the repository with the `git` key:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand" }
+```
+
+Cargo will fetch the `git` repository at this location and then look for a
+`Cargo.toml` for the requested crate anywhere inside the `git` repository
+(not necessarily at the root; for example, you can specify the name of a
+member crate of a workspace and set `git` to the repository containing the
+workspace).
+
+Since we haven’t specified any other information, Cargo assumes that
+we intend to use the latest commit on the `master` branch to build our package.
+You can combine the `git` key with the `rev`, `tag`, or `branch` keys to
+specify something else. Here's an example of specifying that you want to use
+the latest commit on a branch named `next`:
+
+```toml
+[dependencies]
+rand = { git = "https://github.com/rust-lang-nursery/rand", branch = "next" }
+```
+
+### Specifying path dependencies
+
+Over time, our `hello_world` package from [the guide](../guide/index.md) has
+grown significantly in size! It’s gotten to the point that we probably want to
+split out a separate crate for others to use. To do this, Cargo supports **path
+dependencies**, which are typically sub-crates that live within one repository.
+Let’s start off by making a new crate inside of our `hello_world` package:
+
+```console
+# inside of hello_world/
+$ cargo new hello_utils
+```
+
+This will create a new folder `hello_utils` inside of which a `Cargo.toml` and
+`src` folder are ready to be configured. In order to tell Cargo about this, open
+up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils" }
+```
+
+This tells Cargo that we depend on a crate called `hello_utils` which is found
+in the `hello_utils` folder (relative to the `Cargo.toml` it’s written in).
+
+And that’s it! The next `cargo build` will automatically build `hello_utils` and
+all of its own dependencies, and others can start using the crate as well.
+However, crates that use dependencies specified with only a path are not
+permitted on [crates.io]. If we wanted to publish our `hello_world` crate, we
+would need to publish a version of `hello_utils` to [crates.io](https://crates.io)
+and specify its version in the dependencies line as well:
+
+```toml
+[dependencies]
+hello_utils = { path = "hello_utils", version = "0.1.0" }
+```
+
+### Overriding dependencies
+
+There are a number of methods in Cargo to support overriding dependencies and
+otherwise controlling the dependency graph. These options, though, are
+typically only available at the workspace level and aren't propagated through
+dependencies. In other words, "applications" have the ability to override
+dependencies but "libraries" do not.
+
+The desire to override a dependency or otherwise alter some dependencies can
+arise through a number of scenarios. Most of them, however, boil down to the
+ability to work with a crate before it's been published to crates.io. For
+example:
+
+* A crate you're working on is also used in a much larger application you're
+  working on, and you'd like to test a bug fix to the library inside of the
+  larger application.
+* An upstream crate you don't work on has a new feature or a bug fix on the
+  master branch of its git repository which you'd like to test out.
+* You're about to publish a new major version of your crate, but you'd like to
+  do integration testing across an entire package to ensure the new major
+  version works.
+* You've submitted a fix to an upstream crate for a bug you found, but you'd
+  like to immediately have your application start depending on the fixed version
+  of the crate to avoid blocking on the bug fix getting merged.
+
+These scenarios are currently all solved with the [`[patch]` manifest
+section][patch-section]. Historically some of these scenarios have been solved
+with [the `[replace]` section][replace-section], but we'll document the `[patch]`
+section here.
+
+[patch-section]: manifest.md#the-patch-section
+[replace-section]: manifest.md#the-replace-section
+
+### Testing a bugfix
+
+Let's say you're working with the [`uuid` crate] but while you're working on it
+you discover a bug. You are, however, quite enterprising, so you decide to also
+try to fix the bug! Originally your manifest will look like:
+
+[`uuid` crate]: https://crates.io/crates/uuid
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0"
+```
+
+The first thing we'll do is clone the [`uuid` repository][uuid-repository]
+locally via:
+
+```console
+$ git clone https://github.com/rust-lang-nursery/uuid
+```
+
+Next we'll edit the manifest of `my-library` to contain:
+
+```toml
+[patch.crates-io]
+uuid = { path = "../path/to/uuid" }
+```
+
+Here we declare that we're *patching* the source `crates-io` with a new
+dependency. This will effectively add the locally checked-out version of `uuid`
+to the crates.io registry for our local package.
+
+Next up we need to ensure that our lock file is updated to use this new version
+of `uuid` so our package uses the locally checked-out copy instead of one from
+crates.io. The way `[patch]` works is that it'll load the dependency at
+`../path/to/uuid` and then whenever crates.io is queried for versions of `uuid`
+it'll *also* return the local version.
+
+This means that the version number of the local checkout is significant and will
+affect whether the patch is used. Our manifest declared `uuid = "1.0"` which
+means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution
+algorithm also means that we'll resolve to the maximum version within that
+range. Typically this doesn't matter as the version of the git repository will
+already be greater than or match the maximum version published on crates.io,
+but it's important to keep this in mind!
+
+In any case, typically all you need to do now is:
+
+```console
+$ cargo build
+   Compiling uuid v1.0.0 (.../uuid)
+   Compiling my-library v0.1.0 (.../my-library)
+    Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs
+```
+
+And that's it! You're now building with the local version of `uuid` (note the
+path in parentheses in the build output). If you don't see the local path
+version getting built then you may need to run `cargo update -p uuid --precise
+$version` where `$version` is the version of the locally checked-out copy of
+`uuid`.
+
+Once you've fixed the bug you originally found, the next thing you'll likely
+want to do is submit it as a pull request to the `uuid` crate itself. Once
+you've done that, you can update the `[patch]` section.
+The listing inside of `[patch]` is just like the `[dependencies]` section, so
+once your pull request is merged you could change your `path` dependency to:
+
+```toml
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+[uuid-repository]: https://github.com/rust-lang-nursery/uuid
+
+### Working with an unpublished minor version
+
+Let's now shift gears a bit from bug fixes to adding features. While working on
+`my-library` you discover that a whole new feature is needed in the `uuid`
+crate. You've implemented this feature, tested it locally above with `[patch]`,
+and submitted a pull request. Let's go over how you continue to use and test it
+before it's actually published.
+
+Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but
+since then the master branch of the git repository has updated to `1.0.1`. This
+branch includes the new feature you submitted previously. To use this
+repository we'll edit our `Cargo.toml` to look like:
+
+```toml
+[package]
+name = "my-library"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+uuid = "1.0.1"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Note that our local dependency on `uuid` has been updated to `1.0.1` as it's
+what we'll actually require once the crate is published. This version doesn't
+exist on crates.io, though, so we provide it with the `[patch]` section of the
+manifest.
+
+Now when our library is built it'll fetch `uuid` from the git repository and
+resolve to 1.0.1 inside the repository instead of trying to download a version
+from crates.io. Once 1.0.1 is published on crates.io the `[patch]` section can
+be deleted.
+
+It's also worth noting that `[patch]` applies *transitively*. Let's say you use
+`my-library` in a larger package, such as:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid' }
+```
+
+Remember that `[patch]` is applicable *transitively* but can only be defined at
+the *top level*, so consumers of `my-library` have to repeat the `[patch]`
+section if necessary. Here, though, the new `uuid` crate applies to *both* our
+dependency on `uuid` and the `my-library -> uuid` dependency. The `uuid` crate
+will be resolved to one version for this entire crate graph, 1.0.1, and it'll
+be pulled from the git repository.
+
+#### Overriding repository URL
+
+If the dependency you want to override isn't loaded from `crates.io`, you'll
+have to adjust slightly how you use `[patch]`:
+
+```toml
+[patch."https://github.com/your/repository"]
+my-library = { path = "../my-library/path" }
+```
+
+And that's it!
+
+### Prepublishing a breaking change
+
+As a final scenario, let's take a look at working with a new major version of a
+crate, typically accompanied with breaking changes. Sticking with our previous
+crates, this means that we're going to be creating version 2.0.0 of the `uuid`
+crate. After we've submitted all changes upstream we can update our manifest for
+`my-library` to look like:
+
+```toml
+[dependencies]
+uuid = "2.0"
+
+[patch.crates-io]
+uuid = { git = "https://github.com/rust-lang-nursery/uuid", branch = "2.0.0" }
+```
+
+And that's it!
+Like in the previous example, the 2.0.0 version doesn't actually exist on
+crates.io, but we can still provide it through a git dependency via the
+`[patch]` section. As a thought exercise, let's take another look at the
+`my-binary` manifest from above:
+
+```toml
+[package]
+name = "my-binary"
+version = "0.1.0"
+authors = ["..."]
+
+[dependencies]
+my-library = { git = 'https://example.com/git/my-library' }
+uuid = "1.0"
+
+[patch.crates-io]
+uuid = { git = 'https://github.com/rust-lang-nursery/uuid', branch = '2.0.0' }
+```
+
+Note that this will actually resolve to two versions of the `uuid` crate. The
+`my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but
+the `my-library` crate will use the 2.0.0 version of `uuid`. This will allow you
+to gradually roll out breaking changes to a crate through a dependency graph
+without being forced to update everything all at once.
+
+### Overriding with local dependencies
+
+Sometimes you're only temporarily working on a crate and you don't want to have
+to modify `Cargo.toml` like with the `[patch]` section above. For this use
+case Cargo offers a much more limited version of overrides called **path
+overrides**.
+
+Path overrides are specified through `.cargo/config` instead of `Cargo.toml`,
+and you can find [more documentation about this configuration][config-docs].
+Inside of `.cargo/config` you'll specify a key called `paths`:
+
+[config-docs]: config.md
+
+```toml
+paths = ["/path/to/uuid"]
+```
+
+This array should be filled with directories that contain a `Cargo.toml`. In
+this instance, we’re just adding `uuid`, so it will be the only one that’s
+overridden. This path can be either absolute or relative to the directory that
+contains the `.cargo` folder.
+
+Path overrides are more restricted than the `[patch]` section, however, in
+that they cannot change the structure of the dependency graph. When a
+path replacement is used, the previous set of dependencies must all match the
+new `Cargo.toml` specification exactly. For example, this means that path
+overrides cannot be used to test out adding a dependency to a crate; instead,
+`[patch]` must be used in that situation. As a result, usage of a path
+override is typically isolated to quick bug fixes rather than larger changes.
+
+Note: using a local configuration to override paths will only work for crates
+that have been published to [crates.io]. You cannot use this feature to tell
+Cargo how to find local unpublished crates.
+
+### Platform specific dependencies
+
+Platform-specific dependencies take the same format, but are listed under a
+`target` section. Normally Rust-like [`#[cfg]`
+syntax](../../reference/conditional-compilation.html) will be used to define
+these sections:
+
+```toml
+[target.'cfg(windows)'.dependencies]
+winhttp = "0.4.0"
+
+[target.'cfg(unix)'.dependencies]
+openssl = "1.0.1"
+
+[target.'cfg(target_arch = "x86")'.dependencies]
+native = { path = "native/i686" }
+
+[target.'cfg(target_arch = "x86_64")'.dependencies]
+native = { path = "native/x86_64" }
+```
+
+Like with Rust, the syntax here supports the `not`, `any`, and `all` operators
+to combine various cfg name/value pairs.
+
+If you want to know which cfg targets are available on your platform, run
+`rustc --print=cfg` from the command line. If you want to know which `cfg`
+targets are available for another platform, such as 64-bit Windows,
+run `rustc --print=cfg --target=x86_64-pc-windows-msvc`.
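+
+For example, on a typical 64-bit Linux host the output looks something like
+this (the exact set of values varies by platform and toolchain):
+
+```console
+$ rustc --print=cfg
+debug_assertions
+target_arch="x86_64"
+target_endian="little"
+target_env="gnu"
+target_family="unix"
+target_os="linux"
+target_pointer_width="64"
+unix
+```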
+ +Unlike in your Rust source code, +you cannot use `[target.'cfg(feature = "my_crate")'.dependencies]` to add +dependencies based on optional crate features. +Use [the `[features]` section](manifest.md#the-features-section) +instead. + +In addition to `#[cfg]` syntax, Cargo also supports listing out the full target +the dependencies would apply to: + +```toml +[target.x86_64-pc-windows-gnu.dependencies] +winhttp = "0.4.0" + +[target.i686-unknown-linux-gnu.dependencies] +openssl = "1.0.1" +``` + +If you’re using a custom target specification, quote the full path and file +name: + +```toml +[target."x86_64/windows.json".dependencies] +winhttp = "0.4.0" + +[target."i686/linux.json".dependencies] +openssl = "1.0.1" +native = { path = "native/i686" } + +[target."x86_64/linux.json".dependencies] +openssl = "1.0.1" +native = { path = "native/x86_64" } +``` + +### Development dependencies + +You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format +is equivalent to `[dependencies]`: + +```toml +[dev-dependencies] +tempdir = "0.3" +``` + +Dev-dependencies are not used when compiling +a package for building, but are used for compiling tests, examples, and +benchmarks. + +These dependencies are *not* propagated to other packages which depend on this +package. + +You can also have target-specific development dependencies by using +`dev-dependencies` in the target section header instead of `dependencies`. For +example: + +```toml +[target.'cfg(unix)'.dev-dependencies] +mio = "0.0.1" +``` + +[crates.io]: https://crates.io/ + +### Build dependencies + +You can depend on other Cargo-based crates for use in your build scripts. +Dependencies are declared through the `build-dependencies` section of the +manifest: + +```toml +[build-dependencies] +cc = "1.0.3" +``` + +The build script **does not** have access to the dependencies listed +in the `dependencies` or `dev-dependencies` section. Build +dependencies will likewise not be available to the package itself +unless listed under the `dependencies` section as well. A package +itself and its build script are built separately, so their +dependencies need not coincide. Cargo is kept simpler and cleaner by +using independent dependencies for independent purposes. + +### Choosing features + +If a package you depend on offers conditional features, you can +specify which to use: + +```toml +[dependencies.awesome] +version = "1.3.5" +default-features = false # do not include the default features, and optionally + # cherry-pick individual features +features = ["secure-password", "civet"] +``` + +More information about features can be found in the +[manifest documentation](manifest.md#the-features-section). + +### Renaming dependencies in `Cargo.toml` + +When writing a `[dependencies]` section in `Cargo.toml` the key you write for a +dependency typically matches up to the name of the crate you import from in the +code. For some projects, though, you may wish to reference the crate with a +different name in the code regardless of how it's published on crates.io. For +example you may wish to: + +* Avoid the need to `use foo as bar` in Rust source. +* Depend on multiple versions of a crate. +* Depend on crates with the same name from different registries. 
+
+To support this, Cargo offers a `package` key in the `[dependencies]` section
+that specifies which package should be depended on:
+
+```toml
+[package]
+name = "mypackage"
+version = "0.0.1"
+
+[dependencies]
+foo = "0.1"
+bar = { git = "https://github.com/example/project", package = "foo" }
+baz = { version = "0.1", registry = "custom", package = "foo" }
+```
+
+In this example, three crates are now available in your Rust code:
+
+```rust
+extern crate foo; // crates.io
+extern crate bar; // git repository
+extern crate baz; // registry `custom`
+```
+
+All three of these crates have the package name of `foo` in their own
+`Cargo.toml`, so we're explicitly using the `package` key to inform Cargo that
+we want the `foo` package even though we're calling it something else locally.
+The `package` key, if not specified, defaults to the name of the dependency
+being requested.
+
+Note that if you have an optional dependency like:
+
+```toml
+[dependencies]
+foo = { version = "0.1", package = 'bar', optional = true }
+```
+
+you're depending on the crate `bar` from crates.io, but your crate has a `foo`
+feature instead of a `bar` feature. That is, names of features take after the
+name of the dependency, not the package name, when renamed.
+
+Enabling transitive dependencies works similarly; for example, we could add the
+following to the above manifest:
+
+```toml
+[features]
+log-debug = ['foo/log-debug'] # using 'bar/log-debug' would be an error!
+```
diff --git a/src/doc/src/reference/unstable.md b/src/doc/src/reference/unstable.md
new file mode 100644
index 00000000000..5ec4e191614
--- /dev/null
+++ b/src/doc/src/reference/unstable.md
@@ -0,0 +1,257 @@
+## Unstable Features
+
+Experimental Cargo features are only available on the nightly channel. You
+typically use one of the `-Z` flags to enable them. Run `cargo -Z help` to
+see a list of flags available.
+
+`-Z unstable-options` is a generic flag for enabling other unstable
+command-line flags. Options requiring this will be called out below.
+
+Some unstable features will require you to specify the `cargo-features` key in
+`Cargo.toml`.
+
+### no-index-update
+* Original Issue: [#3479](https://github.com/rust-lang/cargo/issues/3479)
+
+The `-Z no-index-update` flag ensures that Cargo does not attempt to update
+the registry index. This is intended for tools such as Crater that issue many
+Cargo commands and want to avoid the network latency of updating the index
+each time.
+
+### avoid-dev-deps
+* Original Issue: [#4988](https://github.com/rust-lang/cargo/issues/4988)
+* Stabilization Issue: [#5133](https://github.com/rust-lang/cargo/issues/5133)
+
+When running commands such as `cargo install` or `cargo build`, Cargo
+currently requires dev-dependencies to be downloaded, even if they are not
+used. The `-Z avoid-dev-deps` flag allows Cargo to avoid downloading
+dev-dependencies if they are not needed. The `Cargo.lock` file will not be
+generated if dev-dependencies are skipped.
+
+### minimal-versions
+* Original Issue: [#4100](https://github.com/rust-lang/cargo/issues/4100)
+* Tracking Issue: [#5657](https://github.com/rust-lang/cargo/issues/5657)
+
+When a `Cargo.lock` file is generated, the `-Z minimal-versions` flag will
+resolve the dependencies to the minimum semver version that will satisfy the
+requirements (instead of the greatest version).
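+
+For example, the following (nightly-only) invocation regenerates the lock
+file with minimal versions:
+
+```
+cargo +nightly update -Z minimal-versions
+```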
+
+The intended use-case of this flag is to check, during continuous integration,
+that the versions specified in Cargo.toml are a correct reflection of the
+minimum versions that you are actually using. That is, if Cargo.toml says
+`foo = "1.0.0"`, it checks that you don't accidentally depend on features
+added only in `foo 1.5.0`.
+
+### out-dir
+* Original Issue: [#4875](https://github.com/rust-lang/cargo/issues/4875)
+* Tracking Issue: [#6790](https://github.com/rust-lang/cargo/issues/6790)
+
+This feature allows you to specify the directory where artifacts will be
+copied to after they are built. Typically artifacts are only written to the
+`target/release` or `target/debug` directories. However, determining the
+exact filename can be tricky since you need to parse JSON output. The
+`--out-dir` flag makes it easier to predictably access the artifacts. Note
+that the artifacts are copied, so the originals are still in the `target`
+directory. Example:
+
+```
+cargo +nightly build --out-dir=out -Z unstable-options
+```
+
+
+### Profile Overrides
+* Tracking Issue: [rust-lang/rust#48683](https://github.com/rust-lang/rust/issues/48683)
+* RFC: [#2282](https://github.com/rust-lang/rfcs/blob/master/text/2282-profile-dependencies.md)
+
+Profiles can be overridden for specific packages and custom build scripts.
+The general format looks like this:
+
+```toml
+cargo-features = ["profile-overrides"]
+
+[package]
+...
+
+[profile.dev]
+opt-level = 0
+debug = true
+
+# the `image` crate will be compiled with -Copt-level=3
+[profile.dev.overrides.image]
+opt-level = 3
+
+# All dependencies (but not this crate itself or any workspace member)
+# will be compiled with -Copt-level=2. This includes build dependencies.
+[profile.dev.overrides."*"]
+opt-level = 2
+
+# Build scripts or proc-macros and their dependencies will be compiled with
+# `-Copt-level=3`. By default, they use the same rules as the rest of the
+# profile.
+[profile.dev.build-override]
+opt-level = 3
+```
+
+Overrides can only be specified for dev and release profiles.
+
+
+### Config Profiles
+* Tracking Issue: [rust-lang/rust#48683](https://github.com/rust-lang/rust/issues/48683)
+* RFC: [#2282](https://github.com/rust-lang/rfcs/blob/master/text/2282-profile-dependencies.md)
+
+Profiles can be specified in `.cargo/config` files. The `-Z config-profile`
+command-line flag is required to use this feature. The format is the same as
+in a `Cargo.toml` manifest. If found in multiple config files, settings will
+be merged using the regular [config hierarchy](config.md#hierarchical-structure).
+Config settings take precedence over manifest settings.
+
+```toml
+[profile.dev]
+opt-level = 3
+```
+
+```
+cargo +nightly build -Z config-profile
+```
+
+
+### Namespaced features
+* Original issue: [#1286](https://github.com/rust-lang/cargo/issues/1286)
+* Tracking Issue: [#5565](https://github.com/rust-lang/cargo/issues/5565)
+
+Currently, it is not possible to have a feature and a dependency with the same
+name in the manifest. If you set `namespaced-features` to `true`, the namespaces
+for features and dependencies are separated. The effect of this is that, in the
+feature requirements, dependencies have to be prefixed with `crate:`.
Like this: + +```toml +[package] +namespaced-features = true + +[features] +bar = ["crate:baz", "foo"] +foo = [] + +[dependencies] +baz = { version = "0.1", optional = true } +``` + +To prevent unnecessary boilerplate from having to explicitly declare features +for each optional dependency, implicit features get created for any optional +dependencies where a feature of the same name is not defined. However, if +a feature of the same name as a dependency is defined, that feature must +include the dependency as a requirement, as `foo = ["crate:foo"]`. + + +### Build-plan +* Tracking Issue: [#5579](https://github.com/rust-lang/cargo/issues/5579) + +The `--build-plan` argument for the `build` command will output JSON with +information about which commands would be run without actually executing +anything. This can be useful when integrating with another build tool. +Example: + +``` +cargo +nightly build --build-plan -Z unstable-options +``` + +### Metabuild +* Tracking Issue: [rust-lang/rust#49803](https://github.com/rust-lang/rust/issues/49803) +* RFC: [#2196](https://github.com/rust-lang/rfcs/blob/master/text/2196-metabuild.md) + +Metabuild is a feature to have declarative build scripts. Instead of writing +a `build.rs` script, you specify a list of build dependencies in the +`metabuild` key in `Cargo.toml`. A build script is automatically generated +that runs each build dependency in order. Metabuild packages can then read +metadata from `Cargo.toml` to specify their behavior. + +Include `cargo-features` at the top of `Cargo.toml`, a `metabuild` key in the +`package`, list the dependencies in `build-dependencies`, and add any metadata +that the metabuild packages require under `package.metadata`. Example: + +```toml +cargo-features = ["metabuild"] + +[package] +name = "mypackage" +version = "0.0.1" +metabuild = ["foo", "bar"] + +[build-dependencies] +foo = "1.0" +bar = "1.0" + +[package.metadata.foo] +extra-info = "qwerty" +``` + +Metabuild packages should have a public function called `metabuild` that +performs the same actions as a regular `build.rs` script would perform. + +### install-upgrade +* Tracking Issue: [#6797](https://github.com/rust-lang/cargo/issues/6797) + +The `install-upgrade` feature changes the behavior of `cargo install` so that +it will reinstall a package if it is not "up-to-date". If it is "up-to-date", +it will do nothing and exit with success instead of failing. Example: + +``` +cargo +nightly install foo -Z install-upgrade +``` + +Cargo tracks some information to determine if a package is "up-to-date", +including: + +- The package version and source. +- The set of binary names installed. +- The chosen features. +- The release mode (`--debug`). +- The target (`--target`). + +If any of these values change, then Cargo will reinstall the package. + +Installation will still fail if a different package installs a binary of the +same name. `--force` may be used to unconditionally reinstall the package. + +Installing with `--path` will always build and install, unless there are +conflicting binaries from another package. + +Additionally, a new flag `--no-track` is available to prevent `cargo install` +from writing tracking information in `$CARGO_HOME` about which packages are +installed. + +### public-dependency +* Tracking Issue: [#44663](https://github.com/rust-lang/rust/issues/44663) + +The 'public-dependency' feature allows marking dependencies as 'public' +or 'private'. 
When this feature is enabled, additional information is passed to rustc to allow +the 'exported_private_dependencies' lint to function properly. + +This requires the appropriate key to be set in `cargo-features`: + +```toml +cargo-features = ["public-dependency"] + +[dependencies] +my_dep = { version = "1.2.3", public = true } +private_dep = "2.0.0" # Will be 'private' by default +``` + +### cache-messages +* Tracking Issue: [#6986](https://github.com/rust-lang/cargo/issues/6986) + +The `cache-messages` feature causes Cargo to cache the messages generated by +the compiler. This is primarily useful if a crate compiles successfully with +warnings. Previously, re-running Cargo would not display any output. With the +`cache-messages` feature, it will quickly redisplay the previous warnings. + +``` +cargo +nightly check -Z cache-messages +``` + +This works with any command that runs the compiler (`build`, `check`, `test`, +etc.). + +This also changes the way Cargo interacts with the compiler, helping to +prevent interleaved messages when multiple crates attempt to display a message +at the same time. diff --git a/src/doc/stylesheets/all.css b/src/doc/stylesheets/all.css deleted file mode 100644 index 7a1709c57ed..00000000000 --- a/src/doc/stylesheets/all.css +++ /dev/null @@ -1,289 +0,0 @@ -html { - background: url("../images/noise.png"); - background-color: #3b6837; -} - -main, #header { width: 900px; } - -* { - box-sizing: border-box; -} - -body { - display: -webkit-flex; - display: flex; - -webkit-flex-direction: column; - flex-direction: column; - -webkit-align-items: center; - align-items: center; - font-family: sans-serif; -} - -a { color: #00ac5b; text-decoration: none; } -a:hover { color: #00793f; } - -h1 { - font-size: 24px; - margin: 20px 0 10px 0; - font-weight: bold; - color: #b64790; -} - -h1 code:not(.highlight) { - color: #d9a700; - vertical-align: bottom; -} -h1 a, h2 a { color: #b64790; text-decoration: none; } -h1:hover a, h2:hover a { color: #A03D7E; } -h1:hover a:after, -h2:hover a:after { content: '\2002\00a7\2002'; } -:target { background: rgba(239, 242, 178, 1); padding: 5px; } - -h1.title { /* style rustdoc-generated title */ - width: 100%; - padding: 40px 20px 40px 60px; - background-color: #edebdd; - margin-bottom: 20px; - -webkit-border-radius: 5px; - -moz-border-radius: 5px; - -ms-border-radius: 5px; - border-radius: 5px; - margin: 0; - color: #383838; - font-size: 2em; - background-image: url(../images/circle-with-i.png); - background-repeat: no-repeat; - background-position: 20px center; -} - -h2 { - font-size: 18px; - margin: 15px 0 5px 0; - color: #b64790; - font-weight: bold; -} - -h2 code:not(.highlight) { color: #d9a700; } - -code:not(.highlight) { - font-family: monospace; - color: #b64790; -} - -main { - display: -webkit-flex; - display: flex; - -webkit-flex-direction: column; - flex-direction: column; - - width: 100%; - max-width: 900px; - margin-bottom: 10px; - - background-color: #f9f7ec; - padding: 15px; - - -webkit-border-radius: 10px; - -moz-border-radius: 10px; - -ms-border-radius: 10px; - border-radius: 10px; - box-shadow: 0px 0px 5px 2px #3b6837; - border: 5px solid #62865f; - color: #383838; -} - -main > p:first-child { - font-weight: 500; - margin-top: 3px; - padding-bottom: 15px; - border-bottom: 1px solid #62865f; - text-align: center; -} - -main p:first-child a { color: #3b6837; } -main p:first-child a:hover { color: #62865f; } - -main p, main ul { - /* color: #3b6837; */ - margin: 10px 0; - line-height: 150%; -} - -main ul { margin-left: 20px; 
} -main li { list-style-type: disc; } -main strong { font-weight: bold; } - -img.logo { - align-self: center; - margin-bottom: 10px; -} - -pre { - padding: 10px; - margin: 10px 0; - /* border: 1px solid #cad0d0; */ - border-radius: 4px; - max-width: calc(100vw - 45px); - overflow-x: auto; - - background: #383838 !important; - color: white; - padding: 20px; - - /* override prism.js styles */ - font-size: 1em !important; - border: none !important; - box-shadow: none !important; - text-shadow: none !important; -} - -pre code { - text-shadow: none !important; -} - -footer { - padding: 40px; - width: 900px; -} -footer a { - color: white; -} -footer a:hover { - color: #e6e6e6; -} -footer .sep, #header .sep { color: #284725; } -footer .sep { margin: 0 10px; } -#header .sep { margin-left: 10px; } - -.headerlink { - display: none; - text-decoration: none; -} -.fork-me { - position:absolute; - top:0; - right:0; -} - -.token.toml-section { color: #CB4B16; } -.token.toml-key { color: #268BD2; } - -/* Rust code highlighting */ -pre.rust .kw { color: #8959A8; } -pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; } -pre.rust .number, pre.rust .string { color: #718C00; } -pre.rust .self, pre.rust .boolval, pre.rust .prelude-val, -pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; } -pre.rust .comment { color: #8E908C; } -pre.rust .doccomment { color: #4D4D4C; } -pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; } -pre.rust .lifetime { color: #B76514; } -code span.s1 { color: #2AA198; } - -table th { border-bottom: 1px solid black; } -table td, table th { padding: 5px 10px; } - -#header { - color: white; - position: relative; - height: 100px; - display: -webkit-flex; - display: flex; - -webkit-align-items: center; - align-items: center; -} -#header h1 { font-size: 2em; } -#header a, #header h1 { color: white; text-decoration: none; } -#header a:hover { color: #d9d9d9; } - -#header input.search { - border: none; - color: black; - outline: 0; - margin-left: 30px; - padding: 5px 5px 5px 25px; - background-image: url(../images/search.png); - background-repeat: no-repeat; - background-position: 6px 6px; - -webkit-border-radius: 15px; - -moz-border-radius: 15px; - -ms-border-radius: 15px; - border-radius: 15px; -} - -#header .nav { - -webkit-flex-grow: 2; - flex-grow: 2; - text-align: right; -} - -button.dropdown, a.dropdown { cursor: pointer; } -button.dropdown .arrow, a.dropdown .arrow { - font-size: 50%; display: inline-block; vertical-align: middle; -} -button.dropdown .arrow::after, a.dropdown .arrow::after { content: "▼"; } -button.active.dropdown .arrow::after, a.active.dropdown .arrow::after { - content: "▲"; -} - -button { - background: none; - outline: 0; - border: 0; - padding: 10px; - color: white; -} - -button.active { - background:#2a4f27; - box-shadow:inset -2px 2px 4px 0 #243d26 -} - -ul.dropdown { - display: none; - visibility: none; - position: absolute; - top: 100%; - left: 0; - width: 100%; - opacity: 0; - margin: 0; - text-align: left; - padding: 0; - background: white; - border: 1px solid #d5d3cb; - list-style: none; - z-index: 10; - -webkit-border-radius: 5px; - -moz-border-radius: 5px; - -ms-border-radius: 5px; - border-radius: 5px; -} - -ul.dropdown li a { - font-size: 90%; - width: 100%; - display: inline-block; - padding: 8px 10px; - text-decoration: none; - color: #383838 !important; -} - -ul.dropdown li a:hover { - background: #5e5e5e; - color: white !important; -} -ul.dropdown li.last { border-top: 1px solid #d5d3cb; } -ul.dropdown.open { - display: 
block; - visibility: visible; - opacity: 1; -} -.dropdown-container { - display: inline-block; - position: relative; -} - -p > img { - max-width: 100%; -} diff --git a/src/doc/stylesheets/normalize.css b/src/doc/stylesheets/normalize.css deleted file mode 100644 index 73abb76fa41..00000000000 --- a/src/doc/stylesheets/normalize.css +++ /dev/null @@ -1,375 +0,0 @@ -/*! normalize.css v2.0.1 | MIT License | git.io/normalize */ - -/* ========================================================================== - HTML5 display definitions - ========================================================================== */ - -/* - * Corrects `block` display not defined in IE 8/9. - */ - -article, -aside, -details, -figcaption, -figure, -footer, -header, -hgroup, -nav, -section, -summary { - display: block; -} - -/* - * Corrects `inline-block` display not defined in IE 8/9. - */ - -audio, -canvas, -video { - display: inline-block; -} - -/* - * Prevents modern browsers from displaying `audio` without controls. - * Remove excess height in iOS 5 devices. - */ - -audio:not([controls]) { - display: none; - height: 0; -} - -/* - * Addresses styling for `hidden` attribute not present in IE 8/9. - */ - -[hidden] { - display: none; -} - -/* ========================================================================== - Base - ========================================================================== */ - -/* - * 1. Sets default font family to sans-serif. - * 2. Prevents iOS text size adjust after orientation change, without disabling - * user zoom. - */ - -html { - font-family: sans-serif; /* 1 */ - -webkit-text-size-adjust: 100%; /* 2 */ - -ms-text-size-adjust: 100%; /* 2 */ -} - -/* - * Removes default margin. - */ - -body { - margin: 0; -} - -/* ========================================================================== - Links - ========================================================================== */ - -/* - * Addresses `outline` inconsistency between Chrome and other browsers. - */ - -a:focus { - outline: thin dotted; -} - -/* - * Improves readability when focused and also mouse hovered in all browsers. - */ - -a:active, -a:hover { - outline: 0; -} - -/* ========================================================================== - Typography - ========================================================================== */ - -/* - * Addresses `h1` font sizes within `section` and `article` in Firefox 4+, - * Safari 5, and Chrome. - */ - -h1 { - font-size: 2em; -} - -/* - * Addresses styling not present in IE 8/9, Safari 5, and Chrome. - */ - -abbr[title] { - border-bottom: 1px dotted; -} - -/* - * Addresses style set to `bolder` in Firefox 4+, Safari 5, and Chrome. - */ - -b, -strong { - font-weight: bold; -} - -/* - * Addresses styling not present in Safari 5 and Chrome. - */ - -dfn { - font-style: italic; -} - -/* - * Addresses styling not present in IE 8/9. - */ - -mark { - background: #ff0; - color: #000; -} - - -/* - * Corrects font family set oddly in Safari 5 and Chrome. - */ - -code, -kbd, -pre, -samp { - font-family: monospace, serif; - font-size: 1em; -} - -/* - * Improves readability of pre-formatted text in all browsers. - */ - -pre { - white-space: pre; - white-space: pre-wrap; - word-wrap: break-word; -} - -/* - * Sets consistent quote types. - */ - -q { - quotes: "\201C" "\201D" "\2018" "\2019"; -} - -/* - * Addresses inconsistent and variable font size in all browsers. - */ - -small { - font-size: 80%; -} - -/* - * Prevents `sub` and `sup` affecting `line-height` in all browsers. 
- */ - -sub, -sup { - font-size: 75%; - line-height: 0; - position: relative; - vertical-align: baseline; -} - -sup { - top: -0.5em; -} - -sub { - bottom: -0.25em; -} - -/* ========================================================================== - Embedded content - ========================================================================== */ - -/* - * Removes border when inside `a` element in IE 8/9. - */ - -img { - border: 0; -} - -/* - * Corrects overflow displayed oddly in IE 9. - */ - -svg:not(:root) { - overflow: hidden; -} - -/* ========================================================================== - Figures - ========================================================================== */ - -/* - * Addresses margin not present in IE 8/9 and Safari 5. - */ - -figure { - margin: 0; -} - -/* ========================================================================== - Forms - ========================================================================== */ - -/* - * Define consistent border, margin, and padding. - */ - -fieldset { - border: 1px solid #c0c0c0; - margin: 0 2px; - padding: 0.35em 0.625em 0.75em; -} - -/* - * 1. Corrects color not being inherited in IE 8/9. - * 2. Remove padding so people aren't caught out if they zero out fieldsets. - */ - -legend { - border: 0; /* 1 */ - padding: 0; /* 2 */ -} - -/* - * 1. Corrects font family not being inherited in all browsers. - * 2. Corrects font size not being inherited in all browsers. - * 3. Addresses margins set differently in Firefox 4+, Safari 5, and Chrome - */ - -button, -input, -select, -textarea { - font-family: inherit; /* 1 */ - font-size: 100%; /* 2 */ - margin: 0; /* 3 */ -} - -/* - * Addresses Firefox 4+ setting `line-height` on `input` using `!important` in - * the UA stylesheet. - */ - -button, -input { - line-height: normal; -} - -/* - * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` - * and `video` controls. - * 2. Corrects inability to style clickable `input` types in iOS. - * 3. Improves usability and consistency of cursor style between image-type - * `input` and others. - */ - -button, -html input[type="button"], /* 1 */ -input[type="reset"], -input[type="submit"] { - -webkit-appearance: button; /* 2 */ - cursor: pointer; /* 3 */ -} - -/* - * Re-set default cursor for disabled elements. - */ - -button[disabled], -input[disabled] { - cursor: default; -} - -/* - * 1. Addresses box sizing set to `content-box` in IE 8/9. - * 2. Removes excess padding in IE 8/9. - */ - -input[type="checkbox"], -input[type="radio"] { - box-sizing: border-box; /* 1 */ - padding: 0; /* 2 */ -} - -/* - * 1. Addresses `appearance` set to `searchfield` in Safari 5 and Chrome. - * 2. Addresses `box-sizing` set to `border-box` in Safari 5 and Chrome - * (include `-moz` to future-proof). - */ - -input[type="search"] { - -webkit-appearance: textfield; /* 1 */ - -moz-box-sizing: content-box; - -webkit-box-sizing: content-box; /* 2 */ - box-sizing: content-box; -} - -/* - * Removes inner padding and search cancel button in Safari 5 and Chrome - * on OS X. - */ - -input[type="search"]::-webkit-search-cancel-button, -input[type="search"]::-webkit-search-decoration { - -webkit-appearance: none; -} - -/* - * Removes inner padding and border in Firefox 4+. - */ - -button::-moz-focus-inner, -input::-moz-focus-inner { - border: 0; - padding: 0; -} - -/* - * 1. Removes default vertical scrollbar in IE 8/9. - * 2. Improves readability and alignment in all browsers. 
- */ - -textarea { - overflow: auto; /* 1 */ - vertical-align: top; /* 2 */ -} - -/* ========================================================================== - Tables - ========================================================================== */ - -/* - * Remove most spacing between table cells. - */ - -table { - border-collapse: collapse; - border-spacing: 0; -} \ No newline at end of file diff --git a/src/doc/stylesheets/prism.css b/src/doc/stylesheets/prism.css deleted file mode 100644 index d80a9410284..00000000000 --- a/src/doc/stylesheets/prism.css +++ /dev/null @@ -1,197 +0,0 @@ -/* http://prismjs.com/download.html?themes=prism-twilight&languages=markup+css+clike+javascript */ -/** - * prism.js Twilight theme - * Based (more or less) on the Twilight theme originally of Textmate fame. - * @author Remy Bach - */ -code[class*="language-"], -pre[class*="language-"] { - color: white; - direction: ltr; - font-family: Consolas, Monaco, 'Andale Mono', monospace; - text-align: left; - text-shadow: 0 -.1em .2em black; - white-space: pre; - word-spacing: normal; - word-break: normal; - line-height: 1.5; - - -moz-tab-size: 4; - -o-tab-size: 4; - tab-size: 4; - - -webkit-hyphens: none; - -moz-hyphens: none; - -ms-hyphens: none; - hyphens: none; -} - -pre[class*="language-"], -:not(pre) > code[class*="language-"] { - background: hsl(0, 0%, 8%); /* #141414 */ -} - -/* Code blocks */ -pre[class*="language-"] { - border-radius: .5em; - border: .3em solid hsl(0, 0%, 33%); /* #282A2B */ - box-shadow: 1px 1px .5em black inset; - margin: .5em 0; - overflow: auto; - padding: 1em; -} - -pre[class*="language-"]::selection { - /* Safari */ - background: hsl(200, 4%, 16%); /* #282A2B */ -} - -pre[class*="language-"]::selection { - /* Firefox */ - background: hsl(200, 4%, 16%); /* #282A2B */ -} - -/* Text Selection colour */ -pre[class*="language-"]::-moz-selection, pre[class*="language-"] ::-moz-selection, -code[class*="language-"]::-moz-selection, code[class*="language-"] ::-moz-selection { - text-shadow: none; - background: hsla(0, 0%, 93%, 0.15); /* #EDEDED */ -} - -pre[class*="language-"]::selection, pre[class*="language-"] ::selection, -code[class*="language-"]::selection, code[class*="language-"] ::selection { - text-shadow: none; - background: hsla(0, 0%, 93%, 0.15); /* #EDEDED */ -} - -/* Inline code */ -:not(pre) > code[class*="language-"] { - border-radius: .3em; - border: .13em solid hsl(0, 0%, 33%); /* #545454 */ - box-shadow: 1px 1px .3em -.1em black inset; - padding: .15em .2em .05em; -} - -.token.comment, -.token.prolog, -.token.doctype, -.token.cdata { - color: hsl(0, 0%, 47%); /* #777777 */ -} - -.token.punctuation { - opacity: .7; -} - -.namespace { - opacity: .7; -} - -.token.tag, -.token.boolean, -.token.number, -.token.deleted { - color: hsl(14, 58%, 55%); /* #CF6A4C */ -} - -.token.keyword, -.token.property, -.token.selector, -.token.constant, -.token.symbol, -.token.builtin { - color: hsl(53, 89%, 79%); /* #F9EE98 */ -} - -.token.attr-name, -.token.attr-value, -.token.string, -.token.char, -.token.operator, -.token.entity, -.token.url, -.language-css .token.string, -.style .token.string, -.token.variable, -.token.inserted { - color: hsl(76, 21%, 52%); /* #8F9D6A */ -} - -.token.atrule { - color: hsl(218, 22%, 55%); /* #7587A6 */ -} - -.token.regex, -.token.important { - color: hsl(42, 75%, 65%); /* #E9C062 */ -} - -.token.important { - font-weight: bold; -} - -.token.entity { - cursor: help; -} - -pre[data-line] { - padding: 1em 0 1em 3em; - position: relative; -} - -/* Markup */ 
-.language-markup .token.tag, -.language-markup .token.attr-name, -.language-markup .token.punctuation { - color: hsl(33, 33%, 52%); /* #AC885B */ -} - -/* Make the tokens sit above the line highlight so the colours don't look faded. */ -.token { - position: relative; - z-index: 1; -} - -.line-highlight { - background: -moz-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ - background: -o-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ - background: -webkit-linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ - background: hsla(0, 0%, 33%, 0.25); /* #545454 */ - background: linear-gradient(left, hsla(0, 0%, 33%, .1) 70%, hsla(0, 0%, 33%, 0)); /* #545454 */ - border-bottom: 1px dashed hsl(0, 0%, 33%); /* #545454 */ - border-top: 1px dashed hsl(0, 0%, 33%); /* #545454 */ - left: 0; - line-height: inherit; - margin-top: 0.75em; /* Same as .prism’s padding-top */ - padding: inherit 0; - pointer-events: none; - position: absolute; - right: 0; - white-space: pre; - z-index: 0; -} - -.line-highlight:before, -.line-highlight[data-end]:after { - background-color: hsl(215, 15%, 59%); /* #8794A6 */ - border-radius: 999px; - box-shadow: 0 1px white; - color: hsl(24, 20%, 95%); /* #F5F2F0 */ - content: attr(data-start); - font: bold 65%/1.5 sans-serif; - left: .6em; - min-width: 1em; - padding: 0 .5em; - position: absolute; - text-align: center; - text-shadow: none; - top: .4em; - vertical-align: .3em; -} - -.line-highlight[data-end]:after { - bottom: .4em; - content: attr(data-end); - top: auto; -} - diff --git a/src/doc/theme/favicon.png b/src/doc/theme/favicon.png new file mode 100644 index 00000000000..a91ad692c91 Binary files /dev/null and b/src/doc/theme/favicon.png differ diff --git a/src/etc/_cargo b/src/etc/_cargo index a1c43aff8c0..1c8c413f16c 100644 --- a/src/etc/_cargo +++ b/src/etc/_cargo @@ -1,315 +1,460 @@ #compdef cargo -typeset -A opt_args autoload -U regexp-replace _cargo() { - -_arguments \ - '(- 1 *)'{-h,--help}'[show help message]' \ - '(- 1 *)'--list'[list installed commands]' \ - '(- 1 *)'{-v,--verbose}'[use verbose output]' \ - '(- 1 *)'--color'[colorization option]' \ - '(- 1 *)'{-V,--version}'[show version information]' \ - '1: :_cargo_cmds' \ - '*:: :->args' - -case $state in - args) - #TODO: add path completion to manifest-path options - case $words[1] in - bench) - _arguments \ - '--features=[space separated feature list]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ - '--manifest-path=[path to manifest]' \ - '--bench=[benchmark name]: :_benchmark_names' \ - '--no-default-features[do not build the default features]' \ - '--no-run[compile but do not run]' \ - '(-p,--package)'{-p=,--package=}'[package to run benchmarks for]:packages:_get_package_names' \ - '--target=[target triple]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - build) - _arguments \ - '--features=[space separated feature list]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ - '--manifest-path=[path to manifest]' \ - '--no-default-features[do not build the default features]' \ - '(-p,--package)'{-p=,--package=}'[package to build]:packages:_get_package_names' \ - '--release=[build in release mode]' \ - '--target=[target triple]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - 
'--color=[colorization option]' \ - ;; - - clean) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '(-p,--package)'{-p=,--package=}'[package to clean]:packages:_get_package_names' \ - '--target=[target triple(default:all)]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - config-for-key) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--human[]' \ - '--key=[key]' \ - ;; - - config-list) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--human[]' \ - ;; - - doc) - _arguments \ - '--features=[space separated feature list]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ - '--manifest-path=[path to manifest]' \ - '--no-deps[do not build docs for dependencies]' \ - '--no-default-features[do not build the default features]' \ - '--open[oen docs in browser after the build]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - fetch) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - generate-lockfile) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - git-checkout) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--reference=[REF]' \ - '--url=[URL]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - help) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '*: :_cargo_cmds' \ - ;; - - locate-project) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - ;; - - login) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--host=[Host to set the token for]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - new) - _arguments \ - '--bin[use binary template]' \ - '--git[initialize new git repo]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--hg[initialize new mercurial repo]' \ - '--no-git[no new git repo]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - owner) - _arguments \ - '(-a, --add)'{-a,--add}'[add owner LOGIN]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--index[registry index]' \ - '(-r, --remove)'{-r,--remove}'[remove owner LOGIN]' \ - '--token[API token]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - package) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '--no-verify[do not build to verify contents]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - pkgid) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - publish) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--host=[Host to set the token for]' \ - '--manifest-path=[path to manifest]' \ - '--no-verify[Do not 
verify tarball until before publish]' \ - '--token[Token to use when uploading]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - read-manifest) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - run) - _arguments \ - '--example=[name of the bin target]' \ - '--features=[space separated feature list]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ - '--manifest-path=[path to manifest]' \ - '--bin=[name of the bin target]' \ - '--no-default-features[do not build the default features]' \ - '--release=[build in release mode]' \ - '--target=[target triple]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - '*: :_normal' \ - ;; - - test) - _arguments \ - '--features=[space separated feature list]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '(-j, --jobs)'{-j,--jobs}'[number of jobs to run in parallel]' \ - '--manifest-path=[path to manifest]' \ - '--test=[test name]: :_test_names' \ - '--no-default-features[do not build the default features]' \ - '--no-run[compile but do not run]' \ - '(-p,--package)'{-p=,--package=}'[package to run tests for]:packages:_get_package_names' \ - '--target=[target triple]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - '1: :_test_names' \ - ;; - - update) - _arguments \ - '--aggressive=[force dependency update]' \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '(-p,--package)'{-p=,--package=}'[package to update]:packages:__get_package_names' \ - '--precise=[update single dependency to PRECISE]: :' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - verify-project) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--manifest-path=[path to manifest]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - version) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - ;; - - yank) - _arguments \ - '(-h, --help)'{-h,--help}'[show help message]' \ - '--index[registry index]' \ - '--token[API token]' \ - '--undo[undo yank]' \ - '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ - '--color=[colorization option]' \ - '--vers[yank version]' \ - ;; - esac - ;; -esac + local state + typeset -A opt_args + + # leading items in parentheses are an exclusion list for the arguments following that arg + # See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions + # - => exclude all other options + # 1 => exclude positional arg 1 + # * => exclude all other args + # +blah => exclude +blah + _arguments \ + '(- 1 *)'{-h,--help}'[show help message]' \ + '(- 1 *)--list[list installed commands]' \ + '(- 1 *)'{-V,--version}'[show version information]' \ + {-v,--verbose}'[use verbose output]' \ + --color'[colorization option]' \ + '(+beta +nightly)+stable[use the stable toolchain]' \ + '(+stable +nightly)+beta[use the beta toolchain]' \ + '(+stable +beta)+nightly[use the nightly toolchain]' \ + '1: :_cargo_cmds' \ + '*:: :->args' + + case $state in + args) + case ${words[1]} in + bench) + 
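+ # Each subcommand branch below passes its own spec to _arguments; the + # shared target-selection flags are spliced in from "${command_scope_spec[@]}", + # which is defined near the bottom of this file.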
_arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not build the default features]' \ + '--no-run[compile but do not run]' \ + '(-p,--package)'{-p=,--package=}'[package to run benchmarks for]:packages:_get_package_names' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + build) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not build the default features]' \ + '(-p,--package)'{-p=,--package=}'[package to build]:packages:_get_package_names' \ + '--release=[build in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + check) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + "${command_scope_spec[@]}" \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not check the default features]' \ + '(-p,--package)'{-p=,--package=}'[package to check]:packages:_get_package_names' \ + '--release=[check in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + clean) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-p,--package)'{-p=,--package=}'[package to clean]:packages:_get_package_names' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[whether or not to clean release artifacts]' \ + '--target=[target triple(default:all)]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + doc) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-deps[do not build docs for dependencies]' \ + '--no-default-features[do not build the default features]' \ + '--document-private-items[include non-public items in the documentation]' \ + '--open[open docs in browser after the build]' \ + '(-p, --package)'{-p,--package}'=[package to document]' \ + '(-q, --quiet)'{-q,--quiet}'[no output 
printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[build for the target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + fetch) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + generate-lockfile) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + git-checkout) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--reference=[REF]' \ + '--url=[URL]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + help) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '*: :_cargo_cmds' \ + ;; + + init) + _arguments \ + '--lib[use library template]' \ + '--vcs:initialize a new repo with a given VCS:(git hg none)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--name=[set the resulting package name]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + install) + _arguments \ + '--bin=[only install the specified binary]' \ + '--branch=[branch to use when installing from git]' \ + '--color=:colorization option:(auto always never)' \ + '--debug[build in debug mode instead of release mode]' \ + '--example[install the specified example instead of binaries]' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--git=[URL from which to install the crate]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--no-default-features[do not build the default features]' \ + '--path=[local filesystem path to crate to install]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--rev=[specific commit to use when installing from git]' \ + '--root=[directory to install packages into]: :_files -/' \ + '--tag=[tag to use when installing from git]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--vers=[version to install from crates.io]' \ + ;; + + locate-project) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + ;; + + login) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[Host to set the token for]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + metadata) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "--no-deps[output information only about the root package and don't fetch dependencies]" \ + '--no-default-features[do not include 
the default feature]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '--format-version=[format version(default: 1)]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + new) + _arguments \ + '--lib[use library template]' \ + '--vcs:initialize a new repo with a given VCS:(git hg none)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--name=[set the resulting package name]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + owner) + _arguments \ + '(-a, --add)'{-a,--add}'[add owner LOGIN]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--index[registry index]' \ + '(-l, --list)'{-l,--list}'[list owners of a crate]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-r, --remove)'{-r,--remove}'[remove owner LOGIN]' \ + '--token[API token to use when authenticating]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + package) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-l, --list)'{-l,--list}'[print files included in a package without making one]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-metadata[ignore warnings about a lack of human-usable metadata]' \ + '--no-verify[do not build to verify contents]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + pkgid) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + publish) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[Host to set the token for]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-verify[Do not verify tarball until before publish]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--token[token to use when uploading]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + read-manifest) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + run) + _arguments \ + '--example=[name of the bin target]' \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--bin=[name of the bin target]' \ + '--no-default-features[do not build the default features]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release=[build in release mode]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + '*: :_normal' \ + ;; + + rustc) + _arguments \ + '--color=:colorization option:(auto always never)' \ + 
'--features=[features to compile for the package]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to the manifest to fetch dependencies for]: :_files -/' \ + '--no-default-features[do not compile default features for the package]' \ + '(-p, --package)'{-p,--package}'=[package to build]' \ + '--profile=[profile to build the selected target for]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[target triple which compiles will be for]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "${command_scope_spec[@]}" \ + ;; + + rustdoc) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '--features=[space-separated list of features to also build]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'=[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to the manifest to document]: :_files -/' \ + '--no-default-features[do not build the `default` feature]' \ + '--document-private-items[include non-public items in the documentation]' \ + '--open[open the docs in a browser after the operation]' \ + '(-p, --package)'{-p,--package}'=[package to document]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[build for the target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + "${command_scope_spec[@]}" \ + ;; + + search) + _arguments \ + '--color=:colorization option:(auto always never)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--host=[host of a registry to search in]' \ + '--limit=[limit the number of results]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + ;; + + test) + _arguments \ + '--features=[space separated feature list]' \ + '--all-features[enable all available features]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-j, --jobs)'{-j,--jobs}'[number of parallel jobs, defaults to # of CPUs]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '--no-default-features[do not build the default features]' \ + '--no-fail-fast[run all tests regardless of failure]' \ + '--no-run[compile but do not run]' \ + '(-p,--package)'{-p=,--package=}'[package to run tests for]:packages:_get_package_names' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--release[build artifacts in release mode, with optimizations]' \ + '--target=[target triple]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + '1: :_test_names' \ + '(--doc --bin --example --test --bench)--lib[only test library]' \ + '(--lib --bin --example --test --bench)--doc[only test documentation]' \ + '(--lib --doc --example --test --bench)--bin=[binary name]' \ + '(--lib --doc --bin --test --bench)--example=[example name]' \ + '(--lib --doc --bin --example --bench)--test=[test name]: :_test_names' \ + '(--lib --doc --bin --example --test)--bench=[benchmark name]' \ + '--message-format:error format:(human json short)' \ + '--frozen[require lock and cache up to date]' \ + '--locked[require lock up to date]' + ;; + + uninstall) +
_arguments \ + '--bin=[only uninstall the binary NAME]' \ + '--color=:colorization option:(auto always never)' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-q, --quiet)'{-q,--quiet}'[less output printed to stdout]' \ + '--root=[directory to uninstall packages from]: :_files -/' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + ;; + + update) + _arguments \ + '--aggressive=[force dependency update]' \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-p,--package)'{-p=,--package=}'[package to update]:packages:_get_package_names' \ + '--precise=[update single dependency to PRECISE]: :' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + verify-project) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--manifest-path=[path to manifest]: :_files -/' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + version) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + ;; + + yank) + _arguments \ + '(-h, --help)'{-h,--help}'[show help message]' \ + '--index[registry index]' \ + '(-q, --quiet)'{-q,--quiet}'[no output printed to stdout]' \ + '--token[API token to use when authenticating]' \ + '--undo[undo a yank, putting a version back into the index]' \ + '(-v, --verbose)'{-v,--verbose}'[use verbose output]' \ + '--color=:colorization option:(auto always never)' \ + '--vers[yank version]' \ + ;; + esac + ;; + esac } -_cargo_cmds(){ -local -a commands;commands=( -'bench:execute all benchmarks of a local package' -'build:compile the current project' -'clean:remove generated artifacts' -'config-for-key:print key from cargo config file' -'config-list:print all config from cargo config file' -'doc:build package documentation' -'fetch:fetch package dependencies' -'generate-lockfile:create lockfile' -'git-checkout:git checkout' -'help:get help for commands' -'locate-project:print "Cargo.toml" location' -'login:login to remote server' -'new:create a new project' -'owner:manage the owners of a crate on the registry' -'package:assemble local package into a distributable tarball' -'pkgid:print a fully qualified package specification' -'publish:upload package to the registry' -'read-manifest:print manifest in JSON format' -'run:run the main binary of the local package' -'test:execute all unit and tests of a local package' -'update:update dependencies' -'verify-project:check Cargo.toml' -'version:show version information' -'yank:remove pushed file from index' -) -_describe 'command' commands - +_cargo_cmds() { + local -a commands + # This uses Parameter Expansion Flags, which are a Zsh built-in feature. + # See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags + # and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion + # + # # How does this work? + # + # First it splits the result of `cargo --list` at newlines, then it removes the first line. + # Then it removes the indentation (4 spaces) before each item. (Note the x## pattern [1]).
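+ # For example, assuming `cargo --list` prints entries like + # "    build    Compile the current project", these steps leave + # "build    Compile the current project".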
+ # Then it replaces those spaces between item and description with a `:` + # + # [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns + commands=( ${${${(M)"${(f)$(cargo --list)}":# *}/ ##/}/ ##/:} ) + _describe 'command' commands } #FIXME: Disabled until fixed #gets package names from the manifest file -_get_package_names() -{ +_get_package_names() { + : } #TODO:see if it makes sense to have 'locate-project' to have non-json output. #strips package name from json stuff -_locate_manifest(){ -local manifest=`cargo locate-project 2>/dev/null` -regexp-replace manifest '\{"root":"|"\}' '' -echo $manifest +_locate_manifest() { + local manifest=$(cargo locate-project 2>/dev/null) + regexp-replace manifest '\{"root":"|"\}' '' + echo "$manifest" } -# Extracts the values of "name" from the array given in $1 and shows them as +# Extracts the values of "name" from the array given in $1 and shows them as # command line options for completion -_get_names_from_array() -{ - local -a filelist; +_get_names_from_array() { local manifest=$(_locate_manifest) if [[ -z $manifest ]]; then return 0 @@ -320,40 +465,48 @@ _get_names_from_array() local in_block=false local block_name=$1 names=() - while read line - do + while read -r line; do if [[ $last_line == "[[$block_name]]" ]]; then in_block=true - else - if [[ $last_line =~ '.*\[\[.*' ]]; then + else + if [[ $last_line =~ '\s*\[\[.*' ]]; then in_block=false fi fi if [[ $in_block == true ]]; then - if [[ $line =~ '.*name.*=' ]]; then - regexp-replace line '^.*name *= *|"' "" - names+=$line + if [[ $line =~ '\s*name\s*=' ]]; then + regexp-replace line '^\s*name\s*=\s*|"' '' + names+=( "$line" ) fi - fi + fi last_line=$line - done < $manifest - _describe $block_name names + done < "$manifest" + _describe "$block_name" names } #Gets the test names from the manifest file -_test_names() -{ +_test_names() { _get_names_from_array "test" } #Gets the bench names from the manifest file -_benchmark_names() -{ +_benchmark_names() { _get_names_from_array "bench" } +# These flags are mutually exclusive specifiers for the scope of a command; as +# they are used in multiple places without change, they are expanded into the +# appropriate command's `_arguments` where appropriate. +set command_scope_spec +command_scope_spec=( + '(--bin --example --test --lib)--bench=[benchmark name]: :_benchmark_names' + '(--bench --bin --test --lib)--example=[example name]' + '(--bench --example --test --lib)--bin=[binary name]' + '(--bench --bin --example --test)--lib=[library name]' + '(--bench --bin --example --lib)--test=[test name]' +) _cargo diff --git a/src/etc/cargo.1 b/src/etc/cargo.1 deleted file mode 100644 index 25aade5f48c..00000000000 --- a/src/etc/cargo.1 +++ /dev/null @@ -1,96 +0,0 @@ -.TH CARGO "1" "September 2014" "cargo 0.0.1-pre" "User Commands" -.SH NAME -cargo \- The Rust package manager -.SH SYNOPSIS -.B cargo - [...] - -.B cargo -[\fIOPTIONS\fR] - -.SH DESCRIPTION -This program is a package manager for the Rust language, available at -<\fBhttp://rust-lang.org\fR>. 
- -.SH OPTIONS - -.TP -\fB\-h, \-\-help\fR -Display a help message -.TP -\fB\-V, \-\-version\fR -Print version information and exit -.TP -\fB\-\-list\fR -List all available cargo commands -.TP -\fB\-v, \-\-verbose\fR -Use verbose output -.TP -\fB\-\-color\fR -Configure coloring of output - -.SH COMMANDS - -To get extended information about commands, run 'cargo help ' - -.TP -\fBcargo build\fR -Compile the current project -.TP -\fBcargo clean\fR -Remove the target directory with build output -.TP -\fBcargo doc\fR -Build this project's and its dependencies' documentation -.TP -\fBcargo new\fR -Create a new cargo project -.TP -\fBcargo run\fR -Build and execute src/main.rs -.TP -\fBcargo test\fR -Run the tests for the package -.TP -\fBcargo bench\fR -Run the benchmarks for the package -.TP -\fBcargo update\fR -Update dependencies in Cargo.lock -.TP -\fBcargo package\fR -Generate a source tarball for the current package -.TP -\fBcargo version\fR -Print cargo's version and exit - -.SH FILES - -.TP -~/.cargo -Directory in which Cargo stores repository data. Cargo can be instructed to use a .cargo subdirectory in a different location by setting the CARGO_HOME environment variable. - -.SH "EXAMPLES" -Build a local package and all of its dependencies - $ cargo build - -Build a package with optimizations - $ cargo build --release - -Run tests for a cross-compiled target - $ cargo test --target i686-unknown-linux-gnu - -Learn about a command's options and usage - $ cargo help clean - -.SH "SEE ALSO" - -rustc(1), rustdoc(1) - -.SH "BUGS" -See <\fBhttps://github.com/rust-lang/cargo/issues\fR> for issues. - -.SH "COPYRIGHT" -This work is dual-licensed under Apache 2.0 and MIT terms. See \fBCOPYRIGHT\fR -file in the cargo source distribution. diff --git a/src/etc/cargo.bashcomp.sh b/src/etc/cargo.bashcomp.sh index 2d8a421a6cf..793c5c0f21e 100644 --- a/src/etc/cargo.bashcomp.sh +++ b/src/etc/cargo.bashcomp.sh @@ -1,76 +1,146 @@ +# Required for bash versions < 4.1 +# Default bash version is 3.2 on latest macOS. See #6874 +shopt -s extglob + command -v cargo >/dev/null 2>&1 && _cargo() { - local cur prev words cword cmd + local cur prev words cword _get_comp_words_by_ref cur prev words cword COMPREPLY=() - cmd=${words[1]} + # Skip past - and + options to find the command. + local nwords=${#words[@]} + local cmd_i cmd dd_i + for (( cmd_i=1; cmd_i<$nwords; cmd_i++ )); + do + if [[ ! "${words[$cmd_i]}" =~ ^[+-] ]]; then + cmd="${words[$cmd_i]}" + break + fi + done + # Find the location of the -- separator. 
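+ # Words after "--" belong to the program cargo invokes, not to cargo itself, + # so completion past that point (handled below) offers libtest flags for + # `cargo test` and `cargo bench`, and plain filenames for everything else.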
+ for (( dd_i=1; dd_i<$nwords-1; dd_i++ )); + do + if [[ "${words[$dd_i]}" = "--" ]]; then + break + fi + done - local vcs='git hg none' + local vcs='git hg none pijul fossil' + local color='auto always never' + local msg_format='human json short' local opt_help='-h --help' local opt_verbose='-v --verbose' local opt_quiet='-q --quiet' local opt_color='--color' local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color" + local opt_pkg_spec='-p --package --all --exclude' local opt_pkg='-p --package' - local opt_feat='--features --no-default-features' + local opt_feat='--features --all-features --no-default-features' local opt_mani='--manifest-path' local opt_jobs='-j --jobs' + local opt_force='-f --force' + local opt_test='--test --bench' + local opt_lock='--frozen --locked' + local opt_targets="--lib --bin --bins --example --examples --test --tests --bench --benches --all-targets" - local opt___nocmd="$opt_common -V --version --list" - local opt__bench="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --no-run" - local opt__build="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --release" - local opt__clean="$opt_common $opt_pkg $opt_mani --target" - local opt__doc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --open --no-deps" - local opt__fetch="$opt_common $opt_mani" + local opt___nocmd="$opt_common -V --version --list --explain" + local opt__bench="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test $opt_targets --message-format --target --no-run --no-fail-fast --target-dir" + local opt__build="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test $opt_targets --message-format --target --release --target-dir" + local opt__check="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test $opt_targets --message-format --target --release --profile --target-dir" + local opt__clean="$opt_common $opt_pkg $opt_mani $opt_lock --target --release --doc --target-dir" + local opt__doc="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --bin --bins --lib --target --open --no-deps --release --document-private-items --target-dir" + local opt__fetch="$opt_common $opt_mani $opt_lock" + local opt__fix="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_jobs $opt_targets $opt_lock --release --target --message-format --prepare-for --broken-code --edition --edition-idioms --allow-no-vcs --allow-dirty --allow-staged --profile --target-dir" local opt__generate_lockfile="${opt__fetch}" - local opt__git_checkout="$opt_common --reference --url" + local opt__git_checkout="$opt_common $opt_lock --reference --url" + local opt__help="$opt_help" + local opt__init="$opt_common $opt_lock --bin --lib --name --vcs --edition --registry" + local opt__install="$opt_common $opt_feat $opt_jobs $opt_lock $opt_force --bin --bins --branch --debug --example --examples --git --list --path --rev --root --tag --version --registry --target" local opt__locate_project="$opt_mani -h --help" - local opt__login="$opt_common --host" - local opt__new="$opt_common --vcs --bin --name" - local opt__owner="$opt_common -a --add -r --remove -l --list --index --token" - local opt__pkgid="${opt__fetch}" - local opt__publish="$opt_common $opt_mani --host --token --no-verify" - local opt__read_manifest="$opt_help $opt_verbose $opt_mani" - local opt__run="$opt_common $opt_feat $opt_mani $opt_jobs --target --bin --example --release" - local 
opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --release" - local opt__search="$opt_common --host" - local opt__test="$opt_common $opt_pkg $opt_feat $opt_mani $opt_jobs --target --lib --bin --test --bench --example --no-run --release" - local opt__update="$opt_common $opt_pkg $opt_mani --aggressive --precise" - local opt__package="$opt_common $opt_mani -l --list --no-verify --no-metadata" + local opt__login="$opt_common $opt_lock --host --registry" + local opt__metadata="$opt_common $opt_feat $opt_mani $opt_lock --format-version=1 --no-deps" + local opt__new="$opt_common $opt_lock --vcs --bin --lib --name --edition --registry" + local opt__owner="$opt_common $opt_lock -a --add -r --remove -l --list --index --token --registry" + local opt__package="$opt_common $opt_mani $opt_feat $opt_lock $opt_jobs --allow-dirty -l --list --no-verify --no-metadata --target --target-dir" + local opt__pkgid="${opt__fetch} $opt_pkg" + local opt__publish="$opt_common $opt_mani $opt_feat $opt_lock $opt_jobs --allow-dirty --dry-run --host --token --no-verify --index --registry --target --target-dir" + local opt__read_manifest="$opt_help $opt_quiet $opt_verbose $opt_mani $opt_color " + local opt__run="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --target --bin --example --release --target-dir" + local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test $opt_targets --message-format --profile --target --release --target-dir" + local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test $opt_targets --message-format --target --release --open --target-dir" + local opt__search="$opt_common $opt_lock --host --limit --index --limit --registry" + local opt__test="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_test $opt_targets --message-format --doc --target --no-run --release --no-fail-fast --target-dir" + local opt__uninstall="$opt_common $opt_lock $opt_pkg_spec --bin --root" + local opt__update="$opt_common $opt_pkg_spec $opt_mani $opt_lock --aggressive --precise --dry-run" local opt__verify_project="${opt__fetch}" - local opt__version="$opt_help $opt_verbose" - local opt__yank="$opt_common --vers --undo --index --token" + local opt__version="$opt_help $opt_verbose $opt_color" + local opt__yank="$opt_common $opt_lock --vers --undo --index --token --registry" + local opt__libtest="--help --include-ignored --ignored --test --bench --list --logfile --nocapture --test-threads --skip -q --quiet --exact --color --format" - if [[ $cword -eq 1 ]]; then + if [[ $cword -gt $dd_i ]]; then + # Completion after -- separator. + if [[ "${cmd}" = @(test|bench) ]]; then + COMPREPLY=( $( compgen -W "${opt__libtest}" -- "$cur" ) ) + else + # Fallback to filename completion, useful with `cargo run`. + _filedir + fi + elif [[ $cword -le $cmd_i ]]; then + # Completion before or at the command. 
if [[ "$cur" == -* ]]; then COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) ) + elif [[ "$cur" == +* ]]; then + COMPREPLY=( $( compgen -W "$(_toolchains)" -- "$cur" ) ) else COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) fi - elif [[ $cword -ge 2 ]]; then + else case "${prev}" in --vcs) COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) ) ;; + --color) + COMPREPLY=( $( compgen -W "$color" -- "$cur" ) ) + ;; + --message-format) + COMPREPLY=( $( compgen -W "$msg_format" -- "$cur" ) ) + ;; --manifest-path) _filedir toml ;; + --bin) + COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) ) + ;; + --test) + COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) ) + ;; + --bench) + COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) ) + ;; --example) COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) ) ;; --target) COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) ) ;; + --target-dir) + _filedir -d + ;; help) COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) ;; *) local opt_var=opt__${cmd//-/_} - COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) ) + if [[ -z "${!opt_var}" ]]; then + # Fallback to filename completion. + _filedir + else + COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) ) + fi ;; esac fi @@ -81,16 +151,74 @@ _cargo() } && complete -F _cargo cargo -__cargo_commands=$(cargo --list | tail -n +2) +__cargo_commands=$(cargo --list 2>/dev/null | awk 'NR>1 {print $1}') _locate_manifest(){ local manifest=`cargo locate-project 2>/dev/null` # regexp-replace manifest '\{"root":"|"\}' '' - echo ${manifest:9:-2} + echo ${manifest:9:${#manifest}-11} +} + +# Extracts the values of "name" from the array given in $1 and shows them as +# command line options for completion +_get_names_from_array() +{ + local manifest=$(_locate_manifest) + if [[ -z $manifest ]]; then + return 0 + fi + + local last_line + local -a names + local in_block=false + local block_name=$1 + while read line + do + if [[ $last_line == "[[$block_name]]" ]]; then + in_block=true + else + if [[ $last_line =~ .*\[\[.* ]]; then + in_block=false + fi + fi + + if [[ $in_block == true ]]; then + if [[ $line =~ .*name.*\= ]]; then + line=${line##*=} + line=${line%%\"} + line=${line##*\"} + names+=($line) + fi + fi + + last_line=$line + done < $manifest + echo "${names[@]}" +} + +#Gets the bin names from the manifest file +_bin_names() +{ + _get_names_from_array "bin" +} + +#Gets the test names from the manifest file +_test_names() +{ + _get_names_from_array "test" +} + +#Gets the bench names from the manifest file +_benchmark_names() +{ + _get_names_from_array "bench" } _get_examples(){ - local files=($(dirname $(_locate_manifest))/examples/*.rs) + local manifest=$(_locate_manifest) + [ -z "$manifest" ] && return 0 + + local files=("${manifest%/*}"/examples/*.rs) local names=("${files[@]##*/}") local names=("${names[@]%.*}") # "*" means no examples found @@ -100,30 +228,39 @@ _get_examples(){ } _get_targets(){ - local CURRENT_PATH - if [ `uname -o` == "Cygwin" -a -f "$PWD"/Cargo.toml ]; then - CURRENT_PATH=$PWD - else - CURRENT_PATH=$(_locate_manifest) - fi - if [[ -z "$CURRENT_PATH" ]]; then - return 1 - fi - local TARGETS=() - local FIND_PATHS=( "/" ) - local FIND_PATH LINES LINE - while [[ "$CURRENT_PATH" != "/" ]]; do - FIND_PATHS+=( "$CURRENT_PATH" ) - CURRENT_PATH=$(dirname $CURRENT_PATH) - done - for FIND_PATH in ${FIND_PATHS[@]}; do - if [[ -f "$FIND_PATH"/.cargo/config ]]; then - LINES=( `grep "$FIND_PATH"/.cargo/config -e "^\[target\."` ) - for LINE in ${LINES[@]}; do - 
TARGETS+=(`sed 's/^\[target\.\(.*\)\]$/\1/' <<< $LINE`) - done - fi - done - echo "${TARGETS[@]}" + local result=() + local targets=$(rustup target list) + while read line + do + if [[ "$line" =~ default|installed ]]; then + result+=("${line%% *}") + fi + done <<< "$targets" + echo "${result[@]}" } + +_toolchains(){ + local result=() + local toolchains=$(rustup toolchain list) + local channels="nightly|beta|stable|[0-9]\.[0-9]{1,2}\.[0-9]" + local date="[0-9]{4}-[0-9]{2}-[0-9]{2}" + while read line + do + # Strip " (default)" + line=${line%% *} + if [[ "$line" =~ ^($channels)(-($date))?(-.*) ]]; then + if [[ -z ${BASH_REMATCH[3]} ]]; then + result+=("+${BASH_REMATCH[1]}") + else + # channel-date + result+=("+${BASH_REMATCH[1]}-${BASH_REMATCH[3]}") + fi + result+=("+$line") + else + result+=("+$line") + fi + done <<< "$toolchains" + echo "${result[@]}" +} + # vim:ft=sh diff --git a/src/etc/dl-snapshot.py b/src/etc/dl-snapshot.py deleted file mode 100644 index b279f536a89..00000000000 --- a/src/etc/dl-snapshot.py +++ /dev/null @@ -1,96 +0,0 @@ -import download -import hashlib -import os -import re -import shutil -import sys - -datere = re.compile('^\d{4}-\d{2}-\d{2}') -cksumre = re.compile('^ ([^ ]+) ([^$]+)$') - -current = None -snaps = {} -with open('src/snapshots.txt') as f: - for line in iter(f): - line = line.rstrip() - m = datere.match(line) - if m: - current = m.group() - snaps[current] = {} - continue - - m = cksumre.match(line) - if m: - snaps[current][m.group(1)] = m.group(2) - continue - - # This script currently doesn't look at older snapshots, so there is - # no need to look past the first section. - break - -date = current -triple = sys.argv[1] - -ts = triple.split('-') -arch = ts[0] - -if (arch == 'i586') or (arch == 'i386'): - arch = 'i686' - -if len(ts) == 2: - vendor = 'unknown' - target_os = ts[1] -else: - vendor = ts[1] - target_os = ts[2] - -# NB: The platform format differs from the triple format, to support -# bootstrapping multiple triples from the same snapshot. -plat_arch = arch if (arch != 'i686') else 'i386' -plat_os = target_os -if (target_os == 'windows'): - plat_os = 'winnt' -elif (target_os == 'darwin'): - plat_os = 'macos' -platform = "%s-%s" % (plat_os, plat_arch) -if platform not in snaps[date]: - raise Exception("no snapshot for the triple '%s'" % triple) - -# Reconstitute triple with any applicable changes. For historical reasons -# this differs from the snapshots.txt platform name. 
-if target_os == 'linux': - target_os = 'linux-gnu' -elif target_os == 'darwin': - vendor = 'apple' -elif target_os == 'windows': - vendor = 'pc' - target_os = 'windows-gnu' -triple = "%s-%s-%s" % (arch, vendor, target_os) -hash = snaps[date][platform] - -tarball = 'cargo-nightly-' + triple + '.tar.gz' -url = 'https://static.rust-lang.org/cargo-dist/%s/%s' % \ - (date.strip(), tarball) -dl_path = "target/dl/" + tarball -dst = "target/snapshot" - -if not os.path.isdir('target/dl'): - os.makedirs('target/dl') - -if os.path.isdir(dst): - shutil.rmtree(dst) - -exists = False -if os.path.exists(dl_path): - h = hashlib.sha1(open(dl_path, 'rb').read()).hexdigest() - if h == hash: - print("file already present %s (%s)" % (dl_path, hash,)) - exists = True - -if not exists: - download.get(url, dl_path) - h = hashlib.sha1(open(dl_path, 'rb').read()).hexdigest() - if h != hash: - raise Exception("failed to verify the checksum of the snapshot") - -download.unpack(dl_path, dst) diff --git a/src/etc/download.py b/src/etc/download.py deleted file mode 100644 index 8072737827b..00000000000 --- a/src/etc/download.py +++ /dev/null @@ -1,52 +0,0 @@ -import contextlib -import os -import shutil -import subprocess -import sys -import tarfile - - -def get(url, path, quiet=False): - # see http://serverfault.com/questions/301128/how-to-download - if sys.platform == 'win32': - run(["PowerShell.exe", "/nologo", "-Command", - "(New-Object System.Net.WebClient).DownloadFile('" + url + - "', '" + path + "')"], quiet=quiet) - else: - run(["curl", "-o", path, url], quiet=quiet) - - -def unpack(tarball, dst, quiet=False): - if quiet: - print("extracting " + tarball) - fname = os.path.basename(tarball).replace(".tar.gz", "") - with contextlib.closing(tarfile.open(tarball)) as tar: - for p in tar.getnames(): - name = p.replace(fname + "/", "", 1) - fp = os.path.join(dst, name) - if not quiet: - print("extracting " + p) - tar.extract(p, dst) - tp = os.path.join(dst, p) - if os.path.isdir(tp) and os.path.exists(fp): - continue - shutil.move(tp, fp) - shutil.rmtree(os.path.join(dst, fname)) - - -def run(args, quiet=False): - if not quiet: - print("running: " + ' '.join(args)) - sys.stdout.flush() - # Use Popen here instead of call() as it apparently allows powershell on - # Windows to not lock up waiting for input presumably. 
- ret = subprocess.Popen(args, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - out, err = ret.communicate() - code = ret.wait() - if code != 0: - print("stdout: \n\n" + out) - print("stderr: \n\n" + err) - raise Exception("failed to fetch url") diff --git a/src/etc/install-deps.py b/src/etc/install-deps.py deleted file mode 100644 index 1d73f69cae9..00000000000 --- a/src/etc/install-deps.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python - -import contextlib -import download -import os -import shutil -import sys -import tarfile - -if os.environ.get('BITS') == '32': - host_bits = 'i686' - extra_bits = 'x86_64' -else: - host_bits = 'x86_64' - extra_bits = 'i686' - -extra = None -libdir = 'lib' - -# Figure out our target triple -if sys.platform == 'linux' or sys.platform == 'linux2': - host = host_bits + '-unknown-linux-gnu' - extra = extra_bits + '-unknown-linux-gnu' -elif sys.platform == 'darwin': - host = host_bits + '-apple-darwin' - extra = extra_bits + '-apple-darwin' -elif sys.platform == 'win32': - libdir = 'bin' - if os.environ.get('MSVC') == '1': - host = host_bits + '-pc-windows-msvc' - extra = extra_bits + '-pc-windows-msvc' - else: - host = host_bits + '-pc-windows-gnu' -else: - raise "Unknown platform" - -rust_date = open('src/rustversion.txt').read().strip() -url = 'https://static.rust-lang.org/dist/' + rust_date - - -def install_via_tarballs(): - if os.path.isdir("rustc-install"): - shutil.rmtree("rustc-install") - - host_fname = 'rustc-nightly-' + host + '.tar.gz' - download.get(url + '/' + host_fname, host_fname) - download.unpack(host_fname, "rustc-install", quiet=True) - os.remove(host_fname) - - if extra is not None: - extra_fname = 'rustc-nightly-' + extra + '.tar.gz' - print("adding target libs for " + extra) - download.get(url + '/' + extra_fname, extra_fname) - folder = extra_fname.replace(".tar.gz", "") - with contextlib.closing(tarfile.open(extra_fname)) as tar: - for p in tar.getnames(): - if not "rustc/" + libdir + "/rustlib/" + extra in p: - continue - name = p.replace(folder + "/", "", 1) - dst = "rustc-install/" + name - tar.extract(p, "rustc-install") - tp = os.path.join("rustc-install", p) - if os.path.isdir(tp) and os.path.exists(dst): - continue - shutil.move(tp, dst) - shutil.rmtree("rustc-install/" + folder) - os.remove(extra_fname) - - if os.path.isdir("rustc"): - shutil.rmtree("rustc") - os.rename("rustc-install/rustc", "rustc") - shutil.rmtree("rustc-install") - -install_via_tarballs() diff --git a/src/etc/man/cargo-bench.1 b/src/etc/man/cargo-bench.1 new file mode 100644 index 00000000000..629273951ba --- /dev/null +++ b/src/etc/man/cargo-bench.1 @@ -0,0 +1,518 @@ +'\" t +.\" Title: cargo-bench +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-05-08 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-BENCH" "1" "2019-05-08" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-bench \- Execute benchmarks of a package +.SH "SYNOPSIS" +.sp +\fBcargo bench [\fIOPTIONS\fP] [BENCHNAME] [\-\- \fIBENCH\-OPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Compile and execute benchmarks. 
+.sp +The benchmark filtering argument \fBBENCHNAME\fP and all the arguments following +the two dashes (\fB\-\-\fP) are passed to the benchmark binaries and thus to +\fIlibtest\fP (rustc\(cqs built in unit\-test and micro\-benchmarking framework). If +you\(cqre passing arguments to both Cargo and the binary, the ones after \fB\-\-\fP go +to the binary, the ones before go to Cargo. For details about libtest\(cqs +arguments see the output of \fBcargo bench \(em \-\-help\fP. As an example, this will +run only the benchmark named \fBfoo\fP (and skip other similarly named benchmarks +like \fBfoobar\fP): +.sp +.if n .RS 4 +.nf +cargo bench \-\- foo \-\-exact +.fi +.if n .RE +.sp +Benchmarks are built with the \fB\-\-test\fP option to \fBrustc\fP which creates an +executable with a \fBmain\fP function that automatically runs all functions +annotated with the \fB#[bench]\fP attribute. Cargo passes the \fB\-\-bench\fP flag to +the test harness to tell it to run only benchmarks. +.sp +The libtest harness may be disabled by setting \fBharness = false\fP in the target +manifest settings, in which case your code will need to provide its own \fBmain\fP +function to handle running benchmarks. +.SH "OPTIONS" +.SS "Benchmark Options" +.sp +\fB\-\-no\-run\fP +.RS 4 +Compile, but don\(cqt run benchmarks. +.RE +.sp +\fB\-\-no\-fail\-fast\fP +.RS 4 +Run all benchmarks regardless of failure. Without this flag, Cargo will exit +after the first executable fails. The Rust test harness will run all +benchmarks within the executable to completion, this flag only applies to +the executable as a whole. +.RE +.SS "Package Selection" +.sp +By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (\fB\-\-all\fP is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the \fBworkspace.default\-members\fP key in the root \fBCargo.toml\fP +manifest. +.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Benchmark only the specified packages. See \fBcargo\-pkgid\fP(1) for the +SPEC format. This flag may be specified multiple times. +.RE +.sp +\fB\-\-all\fP +.RS 4 +Benchmark all members in the workspace. +.RE +.sp +\fB\-\-exclude\fP \fISPEC\fP... +.RS 4 +Exclude the specified packages. Must be used in conjunction with the +\fB\-\-all\fP flag. This flag may be specified multiple times. +.RE +.SS "Target Selection" +.sp +When no target selection options are given, \fBcargo bench\fP will build the +following targets of the selected packages: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +lib — used to link with binaries and benchmarks +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +bins (only if benchmark targets are built and required features are +available) +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +lib as a benchmark +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +bins as benchmarks +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +benchmark targets +.RE +.sp +The default behavior can be changed by setting the \fBbench\fP flag for the target +in the manifest settings. Setting examples to \fBbench = true\fP will build and +run the example as a benchmark. 
Setting targets to \fBbench = false\fP will stop +them from being benchmarked by default. Target selection options that take a +target by name ignore the \fBbench\fP flag and will always benchmark the given +target. +.sp +Passing target selection flags will benchmark only the +specified targets. +.sp +\fB\-\-lib\fP +.RS 4 +Benchmark the package\(cqs library. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Benchmark the specified binary. This flag may be specified multiple times. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Benchmark all binary targets. +.RE +.sp +\fB\-\-example\fP \fINAME\fP... +.RS 4 +Benchmark the specified example. This flag may be specified multiple times. +.RE +.sp +\fB\-\-examples\fP +.RS 4 +Benchmark all example targets. +.RE +.sp +\fB\-\-test\fP \fINAME\fP... +.RS 4 +Benchmark the specified integration test. This flag may be specified multiple +times. +.RE +.sp +\fB\-\-tests\fP +.RS 4 +Benchmark all targets in test mode that have the \fBtest = true\fP manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the \fBtest\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-bench\fP \fINAME\fP... +.RS 4 +Benchmark the specified benchmark. This flag may be specified multiple times. +.RE +.sp +\fB\-\-benches\fP +.RS 4 +Benchmark all targets in benchmark mode that have the \fBbench = true\fP +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the \fBbench\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-all\-targets\fP +.RS 4 +Benchmark all targets. This is equivalent to specifying \fB\-\-lib \-\-bins +\-\-tests \-\-benches \-\-examples\fP. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. These features only +apply to the current directory\(cqs package. Features of direct dependencies +may be enabled with \fB/\fP syntax. +.RE +.sp +\fB\-\-all\-features\fP +.RS 4 +Activate all available features of all selected packages. +.RE +.sp +\fB\-\-no\-default\-features\fP +.RS 4 +Do not activate the \fBdefault\fP feature of the current directory\(cqs +package. +.RE +.SS "Compilation Options" +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Benchmark for the given architecture. The default is the host +architecture. The general format of the triple is +\fB\-\-\-\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. +.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Output Options" +.sp +\fB\-\-target\-dir\fP \fIDIRECTORY\fP +.RS 4 +Directory for all generated artifacts and intermediate files. May also be +specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the +\fBbuild.target\-dir\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." 
+Defaults +to \fBtarget\fP in the root of the workspace. +.RE +.SS "Display Options" +.sp +By default the Rust test harness hides output from benchmark execution to keep +results readable. Benchmark output can be recovered (e.g., for debugging) by +passing \fB\-\-nocapture\fP to the benchmark binaries: +.sp +.if n .RS 4 +.nf +cargo bench \-\- \-\-nocapture +.fi +.if n .RE +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." 
+.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +The \fB\-\-jobs\fP argument affects the building of the benchmark executable but +does not affect how many threads are used when running the benchmarks. The +Rust test harness runs benchmarks serially in a single thread. +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Benchmarks are always built with the \fBbench\fP profile. Binary and lib targets +are built separately as benchmarks with the \fBbench\fP profile. Library targets +are built with the \fBrelease\fP profiles when linked to binaries and benchmarks. +Dependencies use the \fBrelease\fP profile. +.sp +If you need a debug build of a benchmark, try building it with +\fBcargo\-build\fP(1) which will use the \fBtest\fP profile which is by default +unoptimized and includes debug information. You can then run the debug\-enabled +benchmark manually. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Build and execute all the benchmarks of the current package: +.sp +.if n .RS 4 +.nf +cargo bench +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Run only a specific benchmark within a specific benchmark target: +.sp +.if n .RS 4 +.nf +cargo bench \-\-bench bench_name \-\- modname::some_benchmark +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-test\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-build.1 b/src/etc/man/cargo-build.1 new file mode 100644 index 00000000000..eeb3d9805c2 --- /dev/null +++ b/src/etc/man/cargo-build.1 @@ -0,0 +1,470 @@ +'\" t +.\" Title: cargo-build +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-05-08 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-BUILD" "1" "2019-05-08" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-build \- Compile the current package +.SH "SYNOPSIS" +.sp +\fBcargo build [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Compile local packages and all of their dependencies. +.SH "OPTIONS" +.SS "Package Selection" +.sp +By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (\fB\-\-all\fP is implied). 
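+.sp
+As an illustrative sequence (one possible workflow, not the only one),
+dependencies can be downloaded ahead of time so that a later benchmark
+run works without network access:
+.sp
+.if n .RS 4
+.nf
+cargo fetch
+cargo bench \-\-offline
+.fi
+.if n .RE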
Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the \fBworkspace.default\-members\fP key in the root \fBCargo.toml\fP +manifest. +.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Build only the specified packages. See \fBcargo\-pkgid\fP(1) for the +SPEC format. This flag may be specified multiple times. +.RE +.sp +\fB\-\-all\fP +.RS 4 +Build all members in the workspace. +.RE +.sp +\fB\-\-exclude\fP \fISPEC\fP... +.RS 4 +Exclude the specified packages. Must be used in conjunction with the +\fB\-\-all\fP flag. This flag may be specified multiple times. +.RE +.SS "Target Selection" +.sp +When no target selection options are given, \fBcargo build\fP will build all +binary and library targets of the selected packages. Binaries are skipped if +they have \fBrequired\-features\fP that are missing. +.sp +Passing target selection flags will build only the +specified targets. +.sp +\fB\-\-lib\fP +.RS 4 +Build the package\(cqs library. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Build the specified binary. This flag may be specified multiple times. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Build all binary targets. +.RE +.sp +\fB\-\-example\fP \fINAME\fP... +.RS 4 +Build the specified example. This flag may be specified multiple times. +.RE +.sp +\fB\-\-examples\fP +.RS 4 +Build all example targets. +.RE +.sp +\fB\-\-test\fP \fINAME\fP... +.RS 4 +Build the specified integration test. This flag may be specified multiple +times. +.RE +.sp +\fB\-\-tests\fP +.RS 4 +Build all targets in test mode that have the \fBtest = true\fP manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the \fBtest\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-bench\fP \fINAME\fP... +.RS 4 +Build the specified benchmark. This flag may be specified multiple times. +.RE +.sp +\fB\-\-benches\fP +.RS 4 +Build all targets in benchmark mode that have the \fBbench = true\fP +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the \fBbench\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-all\-targets\fP +.RS 4 +Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins +\-\-tests \-\-benches \-\-examples\fP. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. These features only +apply to the current directory\(cqs package. Features of direct dependencies +may be enabled with \fB/\fP syntax. +.RE +.sp +\fB\-\-all\-features\fP +.RS 4 +Activate all available features of all selected packages. +.RE +.sp +\fB\-\-no\-default\-features\fP +.RS 4 +Do not activate the \fBdefault\fP feature of the current directory\(cqs +package. +.RE +.SS "Compilation Options" +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Build for the given architecture. 
The default is the host +architecture. The general format of the triple is +\fB\-\-\-\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. +.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-release\fP +.RS 4 +Build optimized artifacts with the \fBrelease\fP profile. See the +PROFILES section for details on how this affects profile selection. +.RE +.SS "Output Options" +.sp +\fB\-\-target\-dir\fP \fIDIRECTORY\fP +.RS 4 +Directory for all generated artifacts and intermediate files. May also be +specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the +\fBbuild.target\-dir\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults +to \fBtarget\fP in the root of the workspace. +.RE +.sp +\fB\-\-out\-dir\fP \fIDIRECTORY\fP +.RS 4 +Copy final artifacts to this directory. +.sp +This option is unstable and available only on the +\c +.URL "https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html" "nightly channel" +and requires the \fB\-Z unstable\-options\fP flag to enable. +See \c +.URL "https://github.com/rust\-lang/cargo/issues/6790" "" " " +for more information. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.sp +\fB\-\-build\-plan\fP +.RS 4 +Outputs a series of JSON messages to stdout that indicate the commands to +run the build. +.sp +This option is unstable and available only on the +\c +.URL "https://doc.rust\-lang.org/book/appendix\-07\-nightly\-rust.html" "nightly channel" +and requires the \fB\-Z unstable\-options\fP flag to enable. +See \c +.URL "https://github.com/rust\-lang/cargo/issues/5579" "" " " +for more information. +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. 
By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. +.TS +allbox tab(:); +lt lt lt. +T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Build the local package and all of its dependencies: +.sp +.if n .RS 4 +.nf +cargo build +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 
4.2 +.\} +Build with optimizations: +.sp +.if n .RS 4 +.nf +cargo build \-\-release +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-rustc\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-check.1 b/src/etc/man/cargo-check.1 new file mode 100644 index 00000000000..2ae71dea30b --- /dev/null +++ b/src/etc/man/cargo-check.1 @@ -0,0 +1,456 @@ +'\" t +.\" Title: cargo-check +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-CHECK" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-check \- Check the current package +.SH "SYNOPSIS" +.sp +\fBcargo check [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Check a local package and all of its dependencies for errors. This will +essentially compile the packages without performing the final step of code +generation, which is faster than running \fBcargo build\fP. The compiler will save +metadata files to disk so that future runs will reuse them if the source has +not been modified. +.SH "OPTIONS" +.SS "Package Selection" +.sp +By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (\fB\-\-all\fP is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the \fBworkspace.default\-members\fP key in the root \fBCargo.toml\fP +manifest. +.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Check only the specified packages. See \fBcargo\-pkgid\fP(1) for the +SPEC format. This flag may be specified multiple times. +.RE +.sp +\fB\-\-all\fP +.RS 4 +Check all members in the workspace. +.RE +.sp +\fB\-\-exclude\fP \fISPEC\fP... +.RS 4 +Exclude the specified packages. Must be used in conjunction with the +\fB\-\-all\fP flag. This flag may be specified multiple times. +.RE +.SS "Target Selection" +.sp +When no target selection options are given, \fBcargo check\fP will check all +binary and library targets of the selected packages. Binaries are skipped if +they have \fBrequired\-features\fP that are missing. +.sp +Passing target selection flags will check only the +specified targets. +.sp +\fB\-\-lib\fP +.RS 4 +Check the package\(cqs library. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Check the specified binary. This flag may be specified multiple times. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Check all binary targets. +.RE +.sp +\fB\-\-example\fP \fINAME\fP... +.RS 4 +Check the specified example. This flag may be specified multiple times. +.RE +.sp +\fB\-\-examples\fP +.RS 4 +Check all example targets. +.RE +.sp +\fB\-\-test\fP \fINAME\fP... +.RS 4 +Check the specified integration test. This flag may be specified multiple +times. +.RE +.sp +\fB\-\-tests\fP +.RS 4 +Check all targets in test mode that have the \fBtest = true\fP manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). 
+Targets may be enabled or disabled by setting the \fBtest\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-bench\fP \fINAME\fP... +.RS 4 +Check the specified benchmark. This flag may be specified multiple times. +.RE +.sp +\fB\-\-benches\fP +.RS 4 +Check all targets in benchmark mode that have the \fBbench = true\fP +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the \fBbench\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-all\-targets\fP +.RS 4 +Check all targets. This is equivalent to specifying \fB\-\-lib \-\-bins +\-\-tests \-\-benches \-\-examples\fP. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. These features only +apply to the current directory\(cqs package. Features of direct dependencies +may be enabled with \fB/\fP syntax. +.RE +.sp +\fB\-\-all\-features\fP +.RS 4 +Activate all available features of all selected packages. +.RE +.sp +\fB\-\-no\-default\-features\fP +.RS 4 +Do not activate the \fBdefault\fP feature of the current directory\(cqs +package. +.RE +.SS "Compilation Options" +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Check for the given architecture. The default is the host +architecture. The general format of the triple is +\fB\-\-\-\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. +.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-release\fP +.RS 4 +Check optimized artifacts with the \fBrelease\fP profile. See the +PROFILES section for details on how this affects profile selection. +.RE +.sp +\fB\-\-profile\fP \fINAME\fP +.RS 4 +Changes check behavior. Currently only \fBtest\fP is +supported, which will check with the +\fB#[cfg(test)]\fP attribute enabled. This is useful to have it +check unit tests which are usually excluded via +the \fBcfg\fP attribute. This does not change the actual profile used. +.RE +.SS "Output Options" +.sp +\fB\-\-target\-dir\fP \fIDIRECTORY\fP +.RS 4 +Directory for all generated artifacts and intermediate files. May also be +specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the +\fBbuild.target\-dir\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults +to \fBtarget\fP in the root of the workspace. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. 
+.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. +.TS +allbox tab(:); +lt lt lt. 
+T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Check the local package for errors: +.sp +.if n .RS 4 +.nf +cargo check +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Check all targets, including unit tests: +.sp +.if n .RS 4 +.nf +cargo check \-\-all\-targets \-\-profile=test +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-build\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-clean.1 b/src/etc/man/cargo-clean.1 new file mode 100644 index 00000000000..1afb47c2de3 --- /dev/null +++ b/src/etc/man/cargo-clean.1 @@ -0,0 +1,244 @@ +'\" t +.\" Title: cargo-clean +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-CLEAN" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-clean \- Remove generated artifacts +.SH "SYNOPSIS" +.sp +\fBcargo clean [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Remove artifacts from the target directory that Cargo has generated in the +past. +.sp +With no options, \fBcargo clean\fP will delete the entire target directory. +.SH "OPTIONS" +.SS "Package Selection" +.sp +When no packages are selected, all packages and all dependencies in the +workspace are cleaned. +.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Clean only the specified packages. This flag may be specified +multiple times. See \fBcargo\-pkgid\fP(1) for the SPEC format. +.RE +.SS "Clean Options" +.sp +\fB\-\-doc\fP +.RS 4 +This option will cause \fBcargo clean\fP to remove only the \fBdoc\fP directory in +the target directory. +.RE +.sp +\fB\-\-release\fP +.RS 4 +Clean all artifacts that were built with the \fBrelease\fP or \fBbench\fP +profiles. +.RE +.sp +\fB\-\-target\-dir\fP \fIDIRECTORY\fP +.RS 4 +Directory for all generated artifacts and intermediate files. May also be +specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the +\fBbuild.target\-dir\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults +to \fBtarget\fP in the root of the workspace. +.RE +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Clean for the given architecture. The default is the host +architecture. The general format of the triple is +\fB\-\-\-\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. +.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." 
+.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Remove the entire target directory: +.sp +.if n .RS 4 +.nf +cargo clean +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 
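+.sp
+For example, to remove the artifacts built for one cross\-compilation
+target (the triple shown is just one of the supported values):
+.sp
+.if n .RS 4
+.nf
+cargo clean \-\-target x86_64\-unknown\-linux\-gnu
+.fi
+.if n .RE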
4.2 +.\} +Remove only the release artifacts: +.sp +.if n .RS 4 +.nf +cargo clean \-\-release +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-build\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-doc.1 b/src/etc/man/cargo-doc.1 new file mode 100644 index 00000000000..26a91320f60 --- /dev/null +++ b/src/etc/man/cargo-doc.1 @@ -0,0 +1,397 @@ +'\" t +.\" Title: cargo-doc +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-DOC" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-doc \- Build a package\(aqs documentation +.SH "SYNOPSIS" +.sp +\fBcargo doc [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Build the documentation for the local package and all dependencies. The output +is placed in \fBtarget/doc\fP in rustdoc\(cqs usual format. +.SH "OPTIONS" +.SS "Documentation Options" +.sp +\fB\-\-open\fP +.RS 4 +Open the docs in a browser after building them. +.RE +.sp +\fB\-\-no\-deps\fP +.RS 4 +Do not build documentation for dependencies. +.RE +.sp +\fB\-\-document\-private\-items\fP +.RS 4 +Include non\-public items in the documentation. +.RE +.SS "Package Selection" +.sp +By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (\fB\-\-all\fP is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the \fBworkspace.default\-members\fP key in the root \fBCargo.toml\fP +manifest. +.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Document only the specified packages. See \fBcargo\-pkgid\fP(1) for the +SPEC format. This flag may be specified multiple times. +.RE +.sp +\fB\-\-all\fP +.RS 4 +Document all members in the workspace. +.RE +.sp +\fB\-\-exclude\fP \fISPEC\fP... +.RS 4 +Exclude the specified packages. Must be used in conjunction with the +\fB\-\-all\fP flag. This flag may be specified multiple times. +.RE +.SS "Target Selection" +.sp +When no target selection options are given, \fBcargo doc\fP will document all +binary and library targets of the selected package. The binary will be skipped +if its name is the same as the lib target. Binaries are skipped if they have +\fBrequired\-features\fP that are missing. +.sp +The default behavior can be changed by setting \fBdoc = false\fP for the target in +the manifest settings. Using target selection options will ignore the \fBdoc\fP +flag and will always document the given target. +.sp +\fB\-\-lib\fP +.RS 4 +Document the package\(cqs library. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Document the specified binary. This flag may be specified multiple times. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Document all binary targets. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. These features only +apply to the current directory\(cqs package. Features of direct dependencies +may be enabled with \fB/\fP syntax. 
+.RE +.sp +\fB\-\-all\-features\fP +.RS 4 +Activate all available features of all selected packages. +.RE +.sp +\fB\-\-no\-default\-features\fP +.RS 4 +Do not activate the \fBdefault\fP feature of the current directory\(cqs +package. +.RE +.SS "Compilation Options" +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Document for the given architecture. The default is the host +architecture. The general format of the triple is +\fB\-\-\-\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. +.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-release\fP +.RS 4 +Document optimized artifacts with the \fBrelease\fP profile. See the +PROFILES section for details on how this affects profile selection. +.RE +.SS "Output Options" +.sp +\fB\-\-target\-dir\fP \fIDIRECTORY\fP +.RS 4 +Directory for all generated artifacts and intermediate files. May also be +specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the +\fBbuild.target\-dir\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults +to \fBtarget\fP in the root of the workspace. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. 
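+.sp
+For example, documentation might be built with a specific set of
+features enabled, including a feature of a direct dependency (the
+feature and dependency names here are hypothetical):
+.sp
+.if n .RS 4
+.nf
+cargo doc \-\-features "foo bar/std"
+.fi
+.if n .RE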
+.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. +.TS +allbox tab(:); +lt lt lt. +T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Build the local package documentation and its dependencies and output to +\fBtarget/doc\fP. +.sp +.if n .RS 4 +.nf +cargo doc +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-rustdoc\fP(1), \fBrustdoc\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-fetch.1 b/src/etc/man/cargo-fetch.1 new file mode 100644 index 00000000000..1fb8c1c1797 --- /dev/null +++ b/src/etc/man/cargo-fetch.1 @@ -0,0 +1,205 @@ +'\" t +.\" Title: cargo-fetch +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-05-12 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-FETCH" "1" "2019-05-12" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. 
ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-fetch \- Fetch dependencies of a package from the network +.SH "SYNOPSIS" +.sp +\fBcargo fetch [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +If a \fBCargo.lock\fP file is available, this command will ensure that all of the +git dependencies and/or registry dependencies are downloaded and locally +available. Subsequent Cargo commands never touch the network after a \fBcargo +fetch\fP unless the lock file changes. +.sp +If the lock file is not available, then this command will generate the lock +file before fetching the dependencies. +.sp +If \fB\-\-target\fP is not specified, then all target dependencies are fetched. +.sp +See also the \c +.URL "https://crates.io/crates/cargo\-prefetch" "cargo\-prefetch" +plugin which adds a command to download popular crates. This may be useful if +you plan to use Cargo without a network with the \fB\-\-offline\fP flag. +.SH "OPTIONS" +.SS "Fetch options" +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Fetch for the given architecture. The default is the host +architecture. The general format of the triple is +\fB\-\-\-\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. +.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. 
+.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Fetch all dependencies: +.sp +.if n .RS 4 +.nf +cargo fetch +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-update\fP(1), \fBcargo\-generate\-lockfile\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-fix.1 b/src/etc/man/cargo-fix.1 new file mode 100644 index 00000000000..25ac7e7e773 --- /dev/null +++ b/src/etc/man/cargo-fix.1 @@ -0,0 +1,543 @@ +'\" t +.\" Title: cargo-fix +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-FIX" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-fix \- Automatically fix lint warnings reported by rustc +.SH "SYNOPSIS" +.sp +\fBcargo fix [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +This Cargo subcommand will automatically take rustc\(cqs suggestions from +diagnostics like warnings and apply them to your source code. This is intended +to help automate tasks that rustc itself already knows how to tell you to fix! +The \fBcargo fix\fP subcommand is also being developed for the Rust 2018 edition +to provide code the ability to easily opt\-in to the new edition without having +to worry about any breakage. +.sp +Executing \fBcargo fix\fP will under the hood execute \fBcargo\-check\fP(1). Any warnings +applicable to your crate will be automatically fixed (if possible) and all +remaining warnings will be displayed when the check process is finished. For +example if you\(cqd like to prepare for the 2018 edition, you can do so by +executing: +.sp +.if n .RS 4 +.nf +cargo fix \-\-edition +.fi +.if n .RE +.sp +which behaves the same as \fBcargo check \-\-all\-targets\fP. 
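+For example (an illustrative pairing of this command with the
+\fB\-\-offline\fP flag; any equivalent workflow works):
+.sp
+.if n .RS 4
+.nf
+cargo fetch
+cargo build \-\-offline
+.fi
+.if n .RE
+.sp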
Similarly if you\(cqd like +to fix code for different platforms you can do: +.sp +.if n .RS 4 +.nf +cargo fix \-\-edition \-\-target x86_64\-pc\-windows\-gnu +.fi +.if n .RE +.sp +or if your crate has optional features: +.sp +.if n .RS 4 +.nf +cargo fix \-\-edition \-\-no\-default\-features \-\-features foo +.fi +.if n .RE +.sp +If you encounter any problems with \fBcargo fix\fP or otherwise have any questions +or feature requests please don\(cqt hesitate to file an issue at +.URL "https://github.com/rust\-lang/cargo" "" "" +.SH "OPTIONS" +.SS "Fix options" +.sp +\fB\-\-broken\-code\fP +.RS 4 +Fix code even if it already has compiler errors. This is useful if \fBcargo +fix\fP fails to apply the changes. It will apply the changes and leave the +broken code in the working directory for you to inspect and manually fix. +.RE +.sp +\fB\-\-edition\fP +.RS 4 +Apply changes that will update the code to the latest edition. This will +not update the edition in the \fBCargo.toml\fP manifest, which must be updated +manually. +.RE +.sp +\fB\-\-edition\-idioms\fP +.RS 4 +Apply suggestions that will update code to the preferred style for the +current edition. +.RE +.sp +\fB\-\-allow\-no\-vcs\fP +.RS 4 +Fix code even if a VCS was not detected. +.RE +.sp +\fB\-\-allow\-dirty\fP +.RS 4 +Fix code even if the working directory has changes. +.RE +.sp +\fB\-\-allow\-staged\fP +.RS 4 +Fix code even if the working directory has staged changes. +.RE +.SS "Package Selection" +.sp +By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (\fB\-\-all\fP is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the \fBworkspace.default\-members\fP key in the root \fBCargo.toml\fP +manifest. +.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Fix only the specified packages. See \fBcargo\-pkgid\fP(1) for the +SPEC format. This flag may be specified multiple times. +.RE +.sp +\fB\-\-all\fP +.RS 4 +Fix all members in the workspace. +.RE +.sp +\fB\-\-exclude\fP \fISPEC\fP... +.RS 4 +Exclude the specified packages. Must be used in conjunction with the +\fB\-\-all\fP flag. This flag may be specified multiple times. +.RE +.SS "Target Selection" +.sp +When no target selection options are given, \fBcargo fix\fP will fix all targets +(\fB\-\-all\-targets\fP implied). Binaries are skipped if they have +\fBrequired\-features\fP that are missing. +.sp +Passing target selection flags will fix only the +specified targets. +.sp +\fB\-\-lib\fP +.RS 4 +Fix the package\(cqs library. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Fix the specified binary. This flag may be specified multiple times. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Fix all binary targets. +.RE +.sp +\fB\-\-example\fP \fINAME\fP... +.RS 4 +Fix the specified example. This flag may be specified multiple times. +.RE +.sp +\fB\-\-examples\fP +.RS 4 +Fix all example targets. +.RE +.sp +\fB\-\-test\fP \fINAME\fP... +.RS 4 +Fix the specified integration test. This flag may be specified multiple +times. +.RE +.sp +\fB\-\-tests\fP +.RS 4 +Fix all targets in test mode that have the \fBtest = true\fP manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. 
Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the \fBtest\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-bench\fP \fINAME\fP... +.RS 4 +Fix the specified benchmark. This flag may be specified multiple times. +.RE +.sp +\fB\-\-benches\fP +.RS 4 +Fix all targets in benchmark mode that have the \fBbench = true\fP +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the \fBbench\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-all\-targets\fP +.RS 4 +Fix all targets. This is equivalent to specifying \fB\-\-lib \-\-bins +\-\-tests \-\-benches \-\-examples\fP. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. These features only +apply to the current directory\(cqs package. Features of direct dependencies +may be enabled with \fB/\fP syntax. +.RE +.sp +\fB\-\-all\-features\fP +.RS 4 +Activate all available features of all selected packages. +.RE +.sp +\fB\-\-no\-default\-features\fP +.RS 4 +Do not activate the \fBdefault\fP feature of the current directory\(cqs +package. +.RE +.SS "Compilation Options" +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Fix for the given architecture. The default is the host +architecture. The general format of the triple is +\fB\-\-\-\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. +.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-release\fP +.RS 4 +Fix optimized artifacts with the \fBrelease\fP profile. See the +PROFILES section for details on how this affects profile selection. +.RE +.sp +\fB\-\-profile\fP \fINAME\fP +.RS 4 +Changes fix behavior. Currently only \fBtest\fP is +supported, which will fix with the +\fB#[cfg(test)]\fP attribute enabled. This is useful to have it +fix unit tests which are usually excluded via +the \fBcfg\fP attribute. This does not change the actual profile used. +.RE +.SS "Output Options" +.sp +\fB\-\-target\-dir\fP \fIDIRECTORY\fP +.RS 4 +Directory for all generated artifacts and intermediate files. May also be +specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the +\fBbuild.target\-dir\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults +to \fBtarget\fP in the root of the workspace. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. 
Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. 
If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. +.TS +allbox tab(:); +lt lt lt. +T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Apply compiler suggestions to the local package: +.sp +.if n .RS 4 +.nf +cargo fix +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Convert a 2015 edition to 2018: +.sp +.if n .RS 4 +.nf +cargo fix \-\-edition +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 3.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 3." 4.2 +.\} +Apply suggested idioms for the current edition: +.sp +.if n .RS 4 +.nf +cargo fix \-\-edition\-idioms +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-check\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-generate-lockfile.1 b/src/etc/man/cargo-generate-lockfile.1 new file mode 100644 index 00000000000..107b8c45e76 --- /dev/null +++ b/src/etc/man/cargo-generate-lockfile.1 @@ -0,0 +1,185 @@ +'\" t +.\" Title: cargo-generate-lockfile +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-GENERATE\-LOCKFILE" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-generate\-lockfile \- Generate the lockfile for a package +.SH "SYNOPSIS" +.sp +\fBcargo generate\-lockfile [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +This command will create the \fBCargo.lock\fP lockfile for the current package or +workspace. If the lockfile already exists, it will be rebuilt if there are any +manifest changes or dependency updates. +.sp +See also \fBcargo\-update\fP(1) which is also capable of creating a \fBCargo.lock\fP +lockfile and has more options for controlling update behavior. +.SH "OPTIONS" +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. 
IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Create or update the lockfile for the current package or workspace: +.sp +.if n .RS 4 +.nf +cargo generate\-lockfile +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-update\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-help.1 b/src/etc/man/cargo-help.1 new file mode 100644 index 00000000000..30e036d5a96 --- /dev/null +++ b/src/etc/man/cargo-help.1 @@ -0,0 +1,75 @@ +'\" t +.\" Title: cargo-help +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2018-12-20 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-HELP" "1" "2018-12-20" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-help \- Get help for a Cargo command +.SH "SYNOPSIS" +.sp +\fBcargo help [\fISUBCOMMAND\fP]\fP +.SH "DESCRIPTION" +.sp +Prints a help message for the given command. 
+.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Get help for a command: +.sp +.if n .RS 4 +.nf +cargo help build +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Help is also available with the \fB\-\-help\fP flag: +.sp +.if n .RS 4 +.nf +cargo build \-\-help +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-init.1 b/src/etc/man/cargo-init.1 new file mode 100644 index 00000000000..1a55f5c57dc --- /dev/null +++ b/src/etc/man/cargo-init.1 @@ -0,0 +1,364 @@ +'\" t +.\" Title: cargo-init +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-01-23 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-INIT" "1" "2019-01-23" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-init \- Create a new Cargo package in an existing directory +.SH "SYNOPSIS" +.sp +\fBcargo init [\fIOPTIONS\fP] [\fIPATH\fP]\fP +.SH "DESCRIPTION" +.sp +This command will create a new Cargo manifest in the current directory. Give a +path as an argument to create in the given directory. +.sp +If there are typically\-named Rust source files already in the directory, those +will be used. If not, then a sample \fBsrc/main.rs\fP file will be created, or +\fBsrc/lib.rs\fP if \fB\-\-lib\fP is passed. +.sp +If the directory is not already in a VCS repository, then a new repository +is created (see \fB\-\-vcs\fP below). +.sp +The "authors" field in the manifest is determined from the environment or +configuration settings. A name is required and is determined from (first match +wins): +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBcargo\-new.name\fP Cargo config value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCARGO_NAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBGIT_AUTHOR_NAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBGIT_COMMITTER_NAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBuser.name\fP git configuration value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBUSER\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBUSERNAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBNAME\fP environment variable +.RE +.sp +The email address is optional and is determined from: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBcargo\-new.email\fP Cargo config value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCARGO_EMAIL\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBGIT_AUTHOR_EMAIL\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. 
IP \(bu 2.3 +.\} +\fBGIT_COMMITTER_EMAIL\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBuser.email\fP git configuration value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBEMAIL\fP environment variable +.RE +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "the reference" " " +for more information about +configuration files. +.sp +See \fBcargo\-new\fP(1) for a similar command which will create a new package in +a new directory. +.SH "OPTIONS" +.SS "Init Options" +.sp +\fB\-\-bin\fP +.RS 4 +Create a package with a binary target (\fBsrc/main.rs\fP). +This is the default behavior. +.RE +.sp +\fB\-\-lib\fP +.RS 4 +Create a package with a library target (\fBsrc/lib.rs\fP). +.RE +.sp +\fB\-\-edition\fP \fIEDITION\fP +.RS 4 +Specify the Rust edition to use. Default is 2018. +Possible values: 2015, 2018 +.RE +.sp +\fB\-\-name\fP \fINAME\fP +.RS 4 +Set the package name. Defaults to the directory name. +.RE +.sp +\fB\-\-vcs\fP \fIVCS\fP +.RS 4 +Initialize a new VCS repository for the given version control system (git, +hg, pijul, or fossil) or do not initialize any version control at all +(none). If not specified, defaults to \fBgit\fP or the configuration value +\fBcargo\-new.vcs\fP, or \fBnone\fP if already inside a VCS repository. +.RE +.sp +\fB\-\-registry\fP \fIREGISTRY\fP +.RS 4 +This sets the \fBpublish\fP field in \fBCargo.toml\fP to the given registry name +which will restrict publishing only to that registry. +.sp +Registry names are defined in \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "." +If not specified, the default registry defined by the \fBregistry.default\fP +config key is used. If the default registry is not set and \fB\-\-registry\fP is not +used, the \fBpublish\fP field will not be set which means that publishing will not +be restricted. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. 
+.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Create a binary Cargo package in the current directory: +.sp +.if n .RS 4 +.nf +cargo init +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-new\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-install.1 b/src/etc/man/cargo-install.1 new file mode 100644 index 00000000000..73998642f55 --- /dev/null +++ b/src/etc/man/cargo-install.1 @@ -0,0 +1,415 @@ +'\" t +.\" Title: cargo-install +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-06-10 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-INSTALL" "1" "2019-06-10" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-install \- Build and install a Rust binary +.SH "SYNOPSIS" +.sp +\fBcargo install [\fIOPTIONS\fP] \fICRATE\fP...\fP +.br +\fBcargo install [\fIOPTIONS\fP] \-\-path \fIPATH\fP\fP +.br +\fBcargo install [\fIOPTIONS\fP] \-\-git \fIURL\fP [\fICRATE\fP...]\fP +.br +\fBcargo install [\fIOPTIONS\fP] \-\-list\fP +.SH "DESCRIPTION" +.sp +This command manages Cargo\(cqs local set of installed binary crates. Only +packages which have executable \fB[[bin]]\fP or \fB[[example]]\fP targets can be +installed, and all executables are installed into the installation root\(cqs +\fBbin\fP folder. +.sp +The installation root is determined, in order of precedence: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fB\-\-root\fP option +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCARGO_INSTALL_ROOT\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBinstall.root\fP Cargo \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "" +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCARGO_HOME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fB$HOME/.cargo\fP +.RE +.sp +There are multiple sources from which a crate can be installed. The default +location is crates.io but the \fB\-\-git\fP, \fB\-\-path\fP, and \fB\-\-registry\fP flags can +change this source. If the source contains more than one package (such as +crates.io or a git repository with multiple crates) the \fICRATE\fP argument is +required to indicate which crate should be installed. +.sp +Crates from crates.io can optionally specify the version they wish to install +via the \fB\-\-version\fP flags, and similarly packages from git repositories can +optionally specify the branch, tag, or revision that should be installed. If a +crate has multiple binaries, the \fB\-\-bin\fP argument can selectively install only +one of them, and if you\(cqd rather install examples the \fB\-\-example\fP argument can +be used as well. +.sp +If the source is crates.io or \fB\-\-git\fP then by default the crate will be built +in a temporary target directory. To avoid this, the target directory can be +specified by setting the \fBCARGO_TARGET_DIR\fP environment variable to a relative +path. 
In particular, this can be useful for caching build artifacts on +continuous integration systems. +.sp +By default, the \fBCargo.lock\fP file that is included with the package will be +ignored. This means that Cargo will recompute which versions of dependencies +to use, possibly using newer versions that have been released since the +package was published. The \fB\-\-locked\fP flag can be used to force Cargo to use +the packaged \fBCargo.lock\fP file if it is available. This may be useful for +ensuring reproducible builds, to use the exact same set of dependencies that +were available when the package was published. It may also be useful if a +newer version of a dependency is published that no longer builds on your +system, or has other problems. The downside to using \fB\-\-locked\fP is that you +will not receive any fixes or updates to any dependency. Note that Cargo did +not start publishing \fBCargo.lock\fP files until version 1.37, which means +packages published with prior versions will not have a \fBCargo.lock\fP file +available. +.SH "OPTIONS" +.SS "Install Options" +.sp +\fB\-\-vers\fP \fIVERSION\fP, \fB\-\-version\fP \fIVERSION\fP +.RS 4 +Specify a version to install. +.RE +.sp +\fB\-\-git\fP \fIURL\fP +.RS 4 +Git URL to install the specified crate from. +.RE +.sp +\fB\-\-branch\fP \fIBRANCH\fP +.RS 4 +Branch to use when installing from git. +.RE +.sp +\fB\-\-tag\fP \fITAG\fP +.RS 4 +Tag to use when installing from git. +.RE +.sp +\fB\-\-rev\fP \fISHA\fP +.RS 4 +Specific commit to use when installing from git. +.RE +.sp +\fB\-\-path\fP \fIPATH\fP +.RS 4 +Filesystem path to local crate to install. +.RE +.sp +\fB\-\-list\fP +.RS 4 +List all installed packages and their versions. +.RE +.sp +\fB\-f\fP, \fB\-\-force\fP +.RS 4 +Force overwriting existing crates or binaries. This can be used to +reinstall or upgrade a crate. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Install only the specified binary. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Install all binaries. +.RE +.sp +\fB\-\-example\fP \fINAME\fP... +.RS 4 +Install only the specified example. +.RE +.sp +\fB\-\-examples\fP +.RS 4 +Install all examples. +.RE +.sp +\fB\-\-root\fP \fIDIR\fP +.RS 4 +Directory to install packages into. +.RE +.sp +\fB\-\-registry\fP \fIREGISTRY\fP +.RS 4 +Name of the registry to use. Registry names are defined in \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "." +If not specified, the default registry is used, which is defined by the +\fBregistry.default\fP config key which defaults to \fBcrates\-io\fP. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. These features only +apply to the current directory\(cqs package. Features of direct dependencies +may be enabled with \fB<dep\-name>/<feature\-name>\fP syntax. +.RE +.sp +\fB\-\-all\-features\fP +.RS 4 +Activate all available features of all selected packages. +.RE +.sp +\fB\-\-no\-default\-features\fP +.RS 4 +Do not activate the \fBdefault\fP feature of the current directory\(cqs +package. +.RE +.SS "Compilation Options" +.sp +\fB\-\-target\fP \fITRIPLE\fP +.RS 4 +Install for the given architecture. The default is the host +architecture. The general format of the triple is +\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fP. Run \fBrustc \-\-print target\-list\fP for a +list of supported targets. 
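+.sp +For example, a sketch of installing for a non\-host target (this assumes the target\(cqs standard library is available, e.g. installed with \fBrustup target add x86_64\-unknown\-linux\-musl\fP): +.sp +.if n .RS 4 +.nf +cargo install ripgrep \-\-target x86_64\-unknown\-linux\-musl +.fi +.if n .RE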
+.sp +This may also be specified with the \fBbuild.target\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-debug\fP +.RS 4 +Build with the \fBdev\fP profile instead of the \fBrelease\fP profile. +.RE +.SS "Manifest Options" +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 
4.2 +.\} +Install a package from crates.io: +.sp +.if n .RS 4 +.nf +cargo install ripgrep +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Reinstall or upgrade a package: +.sp +.if n .RS 4 +.nf +cargo install ripgrep \-\-force +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-uninstall\fP(1), \fBcargo\-search\fP(1), \fBcargo\-publish\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-locate-project.1 b/src/etc/man/cargo-locate-project.1 new file mode 100644 index 00000000000..9d5dca9f3f7 --- /dev/null +++ b/src/etc/man/cargo-locate-project.1 @@ -0,0 +1,155 @@ +'\" t +.\" Title: cargo-locate-project +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2018-12-20 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-LOCATE\-PROJECT" "1" "2018-12-20" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-locate\-project \- Print a JSON representation of a Cargo.toml file\(aqs location +.SH "SYNOPSIS" +.sp +\fBcargo locate\-project [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +This command will print a JSON object to stdout with the full path to the +\fBCargo.toml\fP manifest. +.sp +See also \fBcargo\-metadata\fP(1) which is capable of returning the path to a +workspace root. +.SH "OPTIONS" +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 
4.2 +.\} +Display the path to the manifest based on the current directory: +.sp +.if n .RS 4 +.nf +cargo locate\-project +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-metadata\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-login.1 b/src/etc/man/cargo-login.1 new file mode 100644 index 00000000000..2f9ec810bf0 --- /dev/null +++ b/src/etc/man/cargo-login.1 @@ -0,0 +1,163 @@ +'\" t +.\" Title: cargo-login +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-01-23 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-LOGIN" "1" "2019-01-23" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-login \- Save an API token from the registry locally +.SH "SYNOPSIS" +.sp +\fBcargo login [\fIOPTIONS\fP] [\fITOKEN\fP]\fP +.SH "DESCRIPTION" +.sp +This command will save the API token to disk so that commands that require +authentication, such as \fBcargo\-publish\fP(1), will be automatically +authenticated. The token is saved in \fB$CARGO_HOME/credentials\fP. \fBCARGO_HOME\fP +defaults to \fB.cargo\fP in your home directory. +.sp +If the \fITOKEN\fP argument is not specified, it will be read from stdin. +.sp +The API token for crates.io may be retrieved from \c +.URL "https://crates.io/me" "" "." +.sp +Take care to keep the token secret, it should not be shared with anyone else. +.SH "OPTIONS" +.SS "Login Options" +.sp +\fB\-\-registry\fP \fIREGISTRY\fP +.RS 4 +Name of the registry to use. Registry names are defined in \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "." +If not specified, the default registry is used, which is defined by the +\fBregistry.default\fP config key which defaults to \fBcrates\-io\fP. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. 
+.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Save the API token to disk: +.sp +.if n .RS 4 +.nf +cargo login +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-publish\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-metadata.1 b/src/etc/man/cargo-metadata.1 new file mode 100644 index 00000000000..46ab466584b --- /dev/null +++ b/src/etc/man/cargo-metadata.1 @@ -0,0 +1,449 @@ +'\" t +.\" Title: cargo-metadata +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-05-20 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-METADATA" "1" "2019-05-20" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-metadata \- Machine\-readable metadata about the current package +.SH "SYNOPSIS" +.sp +\fBcargo metadata [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Output the resolved dependencies of a package, the concrete used versions +including overrides, in JSON to stdout. +.sp +It is recommended to include the \fB\-\-format\-version\fP flag to future\-proof +your code to ensure the output is in the format you are expecting. +.sp +See the \c +.URL "https://crates.io/crates/cargo_metadata" "cargo_metadata crate" +for a Rust API for reading the metadata. +.SH "OUTPUT FORMAT" +.sp +The output has the following format: +.sp +.if n .RS 4 +.nf +{ + /* Array of all packages in the workspace. + It also includes all feature\-enabled dependencies unless \-\-no\-deps is used. + */ + "packages": [ + { + /* The name of the package. */ + "name": "my\-package", + /* The version of the package. */ + "version": "0.1.0", + /* The Package ID, a unique identifier for referring to the package. */ + "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)", + /* The license value from the manifest, or null. */ + "license": "MIT/Apache\-2.0", + /* The license\-file value from the manifest, or null. */ + "license_file": "LICENSE", + /* The description value from the manifest, or null. */ + "description": "Package description.", + /* The source ID of the package. This represents where + a package is retrieved from. + This is null for path dependencies and workspace members. + For other dependencies, it is a string with the format: + \- "registry+URL" for registry\-based dependencies. + Example: "registry+https://github.com/rust\-lang/crates.io\-index" + \- "git+URL" for git\-based dependencies. + Example: "git+https://github.com/rust\-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" + */ + "source": null, + /* Array of dependencies declared in the package\(aqs manifest. */ + "dependencies": [ + { + /* The name of the dependency. */ + "name": "bitflags", + /* The source ID of the dependency. May be null, see + description for the package source. + */ + "source": "registry+https://github.com/rust\-lang/crates.io\-index", + /* The version requirement for the dependency. + Dependencies without a version requirement have a value of "*". + */ + "req": "^1.0", + /* The dependency kind. + "dev", "build", or null for a normal dependency. + */ + "kind": null, + /* If the dependency is renamed, this is the new name for + the dependency as a string. null if it is not renamed. 
+ */ + "rename": null, + /* Boolean of whether or not this is an optional dependency. */ + "optional": false, + /* Boolean of whether or not default features are enabled. */ + "uses_default_features": true, + /* Array of features enabled. */ + "features": [], + /* The target platform for the dependency. + null if not a target dependency. + */ + "target": "cfg(windows)", + /* A string of the URL of the registry this dependency is from. + If not specified or null, the dependency is from the default + registry (crates.io). + */ + "registry": null + } + ], + /* Array of Cargo targets. */ + "targets": [ + { + /* Array of target kinds. + \- lib targets list the `crate\-type` values from the + manifest such as "lib", "rlib", "dylib", + "proc\-macro", etc. (default ["lib"]) + \- binary is ["bin"] + \- example is ["example"] + \- integration test is ["test"] + \- benchmark is ["bench"] + \- build script is ["custom\-build"] + */ + "kind": [ + "bin" + ], + /* Array of crate types. + \- lib and example libraries list the `crate\-type` values + from the manifest such as "lib", "rlib", "dylib", + "proc\-macro", etc. (default ["lib"]) + \- all other target kinds are ["bin"] + */ + "crate_types": [ + "bin" + ], + /* The name of the target. */ + "name": "my\-package", + /* Absolute path to the root source file of the target. */ + "src_path": "/path/to/my\-package/src/main.rs", + /* The Rust edition of the target. + Defaults to the package edition. + */ + "edition": "2018", + /* Array of required features. + This property is not included if no required features are set. + */ + "required\-features": ["feat1"], + /* Whether or not this target has doc tests enabled, and + the target is compatible with doc testing. + */ + "doctest": false + } + ], + /* Set of features defined for the package. + Each feature maps to an array of features or dependencies it + enables. + */ + "features": { + "default": [ + "feat1" + ], + "feat1": [], + "feat2": [] + }, + /* Absolute path to this package\(aqs manifest. */ + "manifest_path": "/path/to/my\-package/Cargo.toml", + /* Package metadata. + This is null if no metadata is specified. + */ + "metadata": { + "docs": { + "rs": { + "all\-features": true + } + } + }, + /* Array of authors from the manifest. + Empty array if no authors specified. + */ + "authors": [ + "Jane Doe " + ], + /* Array of categories from the manifest. */ + "categories": [ + "command\-line\-utilities" + ], + /* Array of keywords from the manifest. */ + "keywords": [ + "cli" + ], + /* The readme value from the manifest or null if not specified. */ + "readme": "README.md", + /* The repository value from the manifest or null if not specified. */ + "repository": "https://github.com/rust\-lang/cargo", + /* The default edition of the package. + Note that individual targets may have different editions. + */ + "edition": "2018", + /* Optional string that is the name of a native library the package + is linking to. + */ + "links": null, + } + ], + /* Array of members of the workspace. + Each entry is the Package ID for the package. + */ + "workspace_members": [ + "my\-package 0.1.0 (path+file:///path/to/my\-package)", + ], + /* The resolved dependency graph, with the concrete versions and features + selected. The set depends on the enabled features. + This is null if \-\-no\-deps is specified. + */ + "resolve": { + /* Array of nodes within the dependency graph. + Each node is a package. + */ + "nodes": [ + { + /* The Package ID of this node. 
*/ + "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)", + /* The dependencies of this package, an array of Package IDs. */ + "dependencies": [ + "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)" + ], + /* The dependencies of this package. This is an alternative to + "dependencies" which contains additional information. In + particular, this handles renamed dependencies. + */ + "deps": [ + { + /* The name of the dependency\(aqs library target. + If this is a renamed dependency, this is the new + name. + */ + "name": "bitflags", + /* The Package ID of the dependency. */ + "pkg": "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)" + } + ], + /* Array of features enabled on this package. */ + "features": [ + "default" + ] + } + ], + /* The root package of the workspace. + This is null if this is a virtual workspace. Otherwise it is + the Package ID of the root package. + */ + "root": "my\-package 0.1.0 (path+file:///path/to/my\-package)" + }, + /* The absolute path to the build directory where Cargo places its output. */ + "target_directory": "/path/to/my\-package/target", + /* The version of the schema for this metadata structure. + This will be changed if incompatible changes are ever made. + */ + "version": 1, + /* The absolute path to the root of the workspace. */ + "workspace_root": "/path/to/my\-package" +} +.fi +.if n .RE +.SH "OPTIONS" +.SS "Output Options" +.sp +\fB\-\-no\-deps\fP +.RS 4 +Output information only about the workspace members and don\(cqt fetch +dependencies. +.RE +.sp +\fB\-\-format\-version\fP \fIVERSION\fP +.RS 4 +Specify the version of the output format to use. Currently \fB1\fP is the only +possible value. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. These features only +apply to the current directory\(cqs package. Features of direct dependencies +may be enabled with \fB/\fP syntax. +.RE +.sp +\fB\-\-all\-features\fP +.RS 4 +Activate all available features of all selected packages. +.RE +.sp +\fB\-\-no\-default\-features\fP +.RS 4 +Do not activate the \fBdefault\fP feature of the current directory\(cqs +package. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. 
By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Output JSON about the current package: +.sp +.if n .RS 4 +.nf +cargo metadata \-\-format\-version=1 +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-new.1 b/src/etc/man/cargo-new.1 new file mode 100644 index 00000000000..894b5ab6e2f --- /dev/null +++ b/src/etc/man/cargo-new.1 @@ -0,0 +1,359 @@ +'\" t +.\" Title: cargo-new +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-01-23 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-NEW" "1" "2019-01-23" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-new \- Create a new Cargo package +.SH "SYNOPSIS" +.sp +\fBcargo new [\fIOPTIONS\fP] \fIPATH\fP\fP +.SH "DESCRIPTION" +.sp +This command will create a new Cargo package in the given directory. This +includes a simple template with a \fBCargo.toml\fP manifest, sample source file, +and a VCS ignore file. If the directory is not already in a VCS repository, +then a new repository is created (see \fB\-\-vcs\fP below). +.sp +The "authors" field in the manifest is determined from the environment or +configuration settings. A name is required and is determined from (first match +wins): +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. 
IP \(bu 2.3 +.\} +\fBcargo\-new.name\fP Cargo config value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCARGO_NAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBGIT_AUTHOR_NAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBGIT_COMMITTER_NAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBuser.name\fP git configuration value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBUSER\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBUSERNAME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBNAME\fP environment variable +.RE +.sp +The email address is optional and is determined from: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBcargo\-new.email\fP Cargo config value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCARGO_EMAIL\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBGIT_AUTHOR_EMAIL\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBGIT_COMMITTER_EMAIL\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBuser.email\fP git configuration value +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBEMAIL\fP environment variable +.RE +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "the reference" " " +for more information about +configuration files. +.sp +See \fBcargo\-init\fP(1) for a similar command which will create a new manifest +in an existing directory. +.SH "OPTIONS" +.SS "New Options" +.sp +\fB\-\-bin\fP +.RS 4 +Create a package with a binary target (\fBsrc/main.rs\fP). +This is the default behavior. +.RE +.sp +\fB\-\-lib\fP +.RS 4 +Create a package with a library target (\fBsrc/lib.rs\fP). +.RE +.sp +\fB\-\-edition\fP \fIEDITION\fP +.RS 4 +Specify the Rust edition to use. Default is 2018. +Possible values: 2015, 2018 +.RE +.sp +\fB\-\-name\fP \fINAME\fP +.RS 4 +Set the package name. Defaults to the directory name. +.RE +.sp +\fB\-\-vcs\fP \fIVCS\fP +.RS 4 +Initialize a new VCS repository for the given version control system (git, +hg, pijul, or fossil) or do not initialize any version control at all +(none). If not specified, defaults to \fBgit\fP or the configuration value +\fBcargo\-new.vcs\fP, or \fBnone\fP if already inside a VCS repository. +.RE +.sp +\fB\-\-registry\fP \fIREGISTRY\fP +.RS 4 +This sets the \fBpublish\fP field in \fBCargo.toml\fP to the given registry name +which will restrict publishing only to that registry. +.sp +Registry names are defined in \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "." +If not specified, the default registry defined by the \fBregistry.default\fP +config key is used. If the default registry is not set and \fB\-\-registry\fP is not +used, the \fBpublish\fP field will not be set which means that publishing will not +be restricted. 
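+.sp +As an illustration, with a hypothetical registry named \fBmy\-registry\fP, running \fBcargo new foo \-\-registry my\-registry\fP yields a manifest whose \fB[package]\fP section contains: +.sp +.if n .RS 4 +.nf +publish = ["my\-registry"] +.fi +.if n .RE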
+.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Create a binary Cargo package in the given directory: +.sp +.if n .RS 4 +.nf +cargo new foo +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-init\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-owner.1 b/src/etc/man/cargo-owner.1 new file mode 100644 index 00000000000..8e798a3b2bf --- /dev/null +++ b/src/etc/man/cargo-owner.1 @@ -0,0 +1,238 @@ +'\" t +.\" Title: cargo-owner +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-02-05 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-OWNER" "1" "2019-02-05" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-owner \- Manage the owners of a crate on the registry +.SH "SYNOPSIS" +.sp +\fBcargo owner [\fIOPTIONS\fP] \-\-add \fILOGIN\fP [\fICRATE\fP]\fP +.br +\fBcargo owner [\fIOPTIONS\fP] \-\-remove \fILOGIN\fP [\fICRATE\fP]\fP +.br +\fBcargo owner [\fIOPTIONS\fP] \-\-list [\fICRATE\fP]\fP +.SH "DESCRIPTION" +.sp +This command will modify the owners for a crate on the registry. Owners of a +crate can upload new versions and yank old versions. Non\-team owners can also +modify the set of owners, so take care! +.sp +This command requires you to be authenticated with either the \fB\-\-token\fP option +or using \fBcargo\-login\fP(1). +.sp +If the crate name is not specified, it will use the package name from the +current directory. +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/publishing.html#cargo\-owner" "the reference" " " +for more +information about owners and publishing. 
+.SH "OPTIONS" +.SS "Owner Options" +.sp +\fB\-a\fP, \fB\-\-add\fP \fILOGIN\fP... +.RS 4 +Invite the given user or team as an owner. +.RE +.sp +\fB\-r\fP, \fB\-\-remove\fP \fILOGIN\fP... +.RS 4 +Remove the given user or team as an owner. +.RE +.sp +\fB\-l\fP, \fB\-\-list\fP +.RS 4 +List owners of a crate. +.RE +.sp +\fB\-\-token\fP \fITOKEN\fP +.RS 4 +API token to use when authenticating. This overrides the token stored in +the credentials file (which is created by \fBcargo\-login\fP(1)). +.sp +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config" " " +environment variables can be +used to override the tokens stored in the credentials file. The token for +crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fP environment +variable. Tokens for other registries may be specified with environment +variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fP where \fBNAME\fP is the name +of the registry in all capital letters. +.RE +.sp +\fB\-\-index\fP \fIINDEX\fP +.RS 4 +The URL of the registry index to use. +.RE +.sp +\fB\-\-registry\fP \fIREGISTRY\fP +.RS 4 +Name of the registry to use. Registry names are defined in \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "." +If not specified, the default registry is used, which is defined by the +\fBregistry.default\fP config key which defaults to \fBcrates\-io\fP. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +List owners of a package: +.sp +.if n .RS 4 +.nf +cargo owner \-\-list foo +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Invite an owner to a package: +.sp +.if n .RS 4 +.nf +cargo owner \-\-add username foo +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 3.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 3." 
4.2 +.\} +Remove an owner from a package: +.sp +.if n .RS 4 +.nf +cargo owner \-\-remove username foo +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-login\fP(1), \fBcargo\-publish\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-package.1 b/src/etc/man/cargo-package.1 new file mode 100644 index 00000000000..dfd592e24e5 --- /dev/null +++ b/src/etc/man/cargo-package.1 @@ -0,0 +1,372 @@ +'\" t +.\" Title: cargo-package +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-06-10 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-PACKAGE" "1" "2019-06-10" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-package \- Assemble the local package into a distributable tarball +.SH "SYNOPSIS" +.sp +\fBcargo package [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +This command will create a distributable, compressed \fB.crate\fP file with the +source code of the package in the current directory. The resulting file will +be stored in the \fBtarget/package\fP directory. This performs the following +steps: +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Load and check the current workspace, performing some basic checks. +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +Path dependencies are not allowed unless they have a version key. Cargo +will ignore the path key for dependencies in published packages. +.RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Create the compressed \fB.crate\fP file. +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +The original \fBCargo.toml\fP file is rewritten and normalized. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fB[patch]\fP, \fB[replace]\fP, and \fB[workspace]\fP sections are removed from the +manifest. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCargo.lock\fP is automatically included if the package contains an +executable binary or example target. \fBcargo\-install\fP(1) will use the +packaged lock file if the \fB\-\-locked\fP flag is used. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +A \fB.cargo_vcs_info.json\fP file is included that contains information +about the current VCS checkout hash if available (not included with +\fB\-\-allow\-dirty\fP). +.RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 3.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 3." 4.2 +.\} +Extract the \fB.crate\fP file and build it to verify it can build. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 4.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 4." 4.2 +.\} +Check that build scripts did not modify any source files. +.RE +.sp +The list of files included can be controlled with the \fBinclude\fP and \fBexclude\fP +fields in the manifest. +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/publishing.html" "the reference" " " +for more details about +packaging and publishing. +.SH "OPTIONS" +.SS "Package Options" +.sp +\fB\-l\fP, \fB\-\-list\fP +.RS 4 +Print files included in a package without making one. +.RE +.sp +\fB\-\-no\-verify\fP +.RS 4 +Don\(cqt verify the contents by building them. 
+.RE
+.sp
+\fB\-\-no\-metadata\fP
+.RS 4
+Ignore warnings about a lack of human\-usable metadata (such as the
+description or the license).
+.RE
+.sp
+\fB\-\-allow\-dirty\fP
+.RS 4
+Allow working directories with uncommitted VCS changes to be packaged.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fP \fITRIPLE\fP
+.RS 4
+Package for the given architecture. The default is the host
+architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fP. Run \fBrustc \-\-print target\-list\fP for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-\-target\-dir\fP \fIDIRECTORY\fP
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the
+\fBbuild.target\-dir\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+Defaults
+to \fBtarget\fP in the root of the workspace.
+.RE
+.SS "Feature Selection"
+.sp
+When no feature options are given, the \fBdefault\fP feature is activated for
+every selected package.
+.sp
+\fB\-\-features\fP \fIFEATURES\fP
+.RS 4
+Space or comma separated list of features to activate. These features only
+apply to the current directory\(cqs package. Features of direct dependencies
+may be enabled with \fB<dep\-name>/<feature\-name>\fP syntax.
+.RE
+.sp
+\fB\-\-all\-features\fP
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fP
+.RS 4
+Do not activate the \fBdefault\fP feature of the current directory\(cqs
+package.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fP \fIPATH\fP
+.RS 4
+Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current
+directory or any parent directory for the \fBCargo.toml\fP file.
+.RE
+.sp
+\fB\-\-frozen\fP, \fB\-\-locked\fP
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fP file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fP
+.RS 4
+Prevents Cargo from accessing the network for any reason. Without this
+flag, Cargo will stop with an error if it needs to access the network and
+the network is not available. With this flag, Cargo will attempt to
+proceed without the network if possible.
+.sp
+Beware that this may result in different dependency resolution than online
+mode. Cargo will restrict itself to crates that are downloaded locally, even
+if there might be a newer version as indicated in the local copy of the index.
+See the \fBcargo\-fetch\fP(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.SS "Miscellaneous Options"
+.sp
+\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP
+.RS 4
+Number of parallel jobs to run. May also be specified with the
+\fBbuild.jobs\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+Defaults to
+the number of CPUs.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fP, \fB\-\-verbose\fP
+.RS 4
+Use verbose output.
May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Create a compressed \fB.crate\fP file of the current package: +.sp +.if n .RS 4 +.nf +cargo package +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-publish\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-pkgid.1 b/src/etc/man/cargo-pkgid.1 new file mode 100644 index 00000000000..1b80afa3c75 --- /dev/null +++ b/src/etc/man/cargo-pkgid.1 @@ -0,0 +1,296 @@ +'\" t +.\" Title: cargo-pkgid +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-PKGID" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-pkgid \- Print a fully qualified package specification +.SH "SYNOPSIS" +.sp +\fBcargo pkgid [\fIOPTIONS\fP] [\fISPEC\fP]\fP +.SH "DESCRIPTION" +.sp +Given a \fISPEC\fP argument, print out the fully qualified package ID specifier +for a package or dependency in the current workspace. This command will +generate an error if \fISPEC\fP is ambiguous as to which package it refers to in +the dependency graph. If no \fISPEC\fP is given, then the specifier for the local +package is printed. +.sp +This command requires that a lockfile is available and dependencies have been +fetched. +.sp +A package specifier consists of a name, version, and source URL. You are +allowed to use partial specifiers to succinctly match a specific package as +long as it matches only one package. The format of a \fISPEC\fP can be one of the +following: +.sp +.it 1 an-trap +.nr an-no-space-flag 1 +.nr an-break-flag 1 +.br +.B Table 1. SPEC Query Format +.TS +allbox tab(:); +lt lt. 
+T{ +.sp +SPEC Structure +T}:T{ +.sp +Example SPEC +T} +T{ +.sp +\fINAME\fP +T}:T{ +.sp +\fBbitflags\fP +T} +T{ +.sp +\fINAME\fP\fB:\fP\fIVERSION\fP +T}:T{ +.sp +\fBbitflags:1.0.4\fP +T} +T{ +.sp +\fIURL\fP +T}:T{ +.sp +\fB\c +.URL "https://github.com/rust\-lang/cargo" "" "\fP" +T} +T{ +.sp +\fIURL\fP\fB#\fP\fIVERSION\fP +T}:T{ +.sp +\fB\c +.URL "https://github.com/rust\-lang/cargo#0.33.0" "" "\fP" +T} +T{ +.sp +\fIURL\fP\fB#\fP\fINAME\fP +T}:T{ +.sp +\fB\c +.URL "https://github.com/rust\-lang/crates.io\-index#bitflags" "" "\fP" +T} +T{ +.sp +\fIURL\fP\fB#\fP\fINAME\fP\fB:\fP\fIVERSION\fP +T}:T{ +.sp +\fB\c +.URL "https://github.com/rust\-lang/cargo#crates\-io:0.21.0" "" "\fP" +T} +.TE +.sp +.SH "OPTIONS" +.SS "Package Selection" +.sp +\fB\-p\fP \fISPEC\fP, \fB\-\-package\fP \fISPEC\fP +.RS 4 +Get the package ID for the given package instead of the current package. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." 
+.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Retrieve package specification for \fBfoo\fP package: +.sp +.if n .RS 4 +.nf +cargo pkgid foo +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Retrieve package specification for version 1.0.0 of \fBfoo\fP: +.sp +.if n .RS 4 +.nf +cargo pkgid foo:1.0.0 +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 3.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 3." 4.2 +.\} +Retrieve package specification for \fBfoo\fP from crates.io: +.sp +.if n .RS 4 +.nf +cargo pkgid https://github.com/rust\-lang/crates.io\-index#foo +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-generate\-lockfile\fP(1), \fBcargo\-metadata\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-publish.1 b/src/etc/man/cargo-publish.1 new file mode 100644 index 00000000000..c457a003c4c --- /dev/null +++ b/src/etc/man/cargo-publish.1 @@ -0,0 +1,336 @@ +'\" t +.\" Title: cargo-publish +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-05-08 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-PUBLISH" "1" "2019-05-08" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-publish \- Upload a package to the registry +.SH "SYNOPSIS" +.sp +\fBcargo publish [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +This command will create a distributable, compressed \fB.crate\fP file with the +source code of the package in the current directory and upload it to a +registry. The default registry is \c +.URL "https://crates.io" "" "." +This performs the +following steps: +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Performs a few checks, including: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +Checks the \fBpackage.publish\fP key in the manifest for restrictions on which +registries you are allowed to publish to. +.RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Create a \fB.crate\fP file by following the steps in \fBcargo\-package\fP(1). +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 3.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 3." 4.2 +.\} +Upload the crate to the registry. Note that the server will perform +additional checks on the crate. +.RE +.sp +This command requires you to be authenticated with either the \fB\-\-token\fP option +or using \fBcargo\-login\fP(1). +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/publishing.html" "the reference" " " +for more details about +packaging and publishing. +.SH "OPTIONS" +.SS "Publish Options" +.sp +\fB\-\-dry\-run\fP +.RS 4 +Perform all checks without uploading. 
+.RE
+.sp
+\fB\-\-token\fP \fITOKEN\fP
+.RS 4
+API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by \fBcargo\-login\fP(1)).
+.sp
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config" " "
+environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fP environment
+variable. Tokens for other registries may be specified with environment
+variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fP where \fBNAME\fP is the name
+of the registry in all capital letters.
+.RE
+.sp
+\fB\-\-no\-verify\fP
+.RS 4
+Don\(cqt verify the contents by building them.
+.RE
+.sp
+\fB\-\-allow\-dirty\fP
+.RS 4
+Allow working directories with uncommitted VCS changes to be packaged.
+.RE
+.sp
+\fB\-\-index\fP \fIINDEX\fP
+.RS 4
+The URL of the registry index to use.
+.RE
+.sp
+\fB\-\-registry\fP \fIREGISTRY\fP
+.RS 4
+Name of the registry to use. Registry names are defined in \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "."
+If not specified, the default registry is used, which is defined by the
+\fBregistry.default\fP config key which defaults to \fBcrates\-io\fP.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fP \fITRIPLE\fP
+.RS 4
+Publish for the given architecture. The default is the host
+architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fP. Run \fBrustc \-\-print target\-list\fP for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-\-target\-dir\fP \fIDIRECTORY\fP
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the
+\fBbuild.target\-dir\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+Defaults
+to \fBtarget\fP in the root of the workspace.
+.RE
+.SS "Feature Selection"
+.sp
+When no feature options are given, the \fBdefault\fP feature is activated for
+every selected package.
+.sp
+\fB\-\-features\fP \fIFEATURES\fP
+.RS 4
+Space or comma separated list of features to activate. These features only
+apply to the current directory\(cqs package. Features of direct dependencies
+may be enabled with \fB<dep\-name>/<feature\-name>\fP syntax.
+.RE
+.sp
+\fB\-\-all\-features\fP
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fP
+.RS 4
+Do not activate the \fBdefault\fP feature of the current directory\(cqs
+package.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fP \fIPATH\fP
+.RS 4
+Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current
+directory or any parent directory for the \fBCargo.toml\fP file.
+.RE
+.sp
+\fB\-\-frozen\fP, \fB\-\-locked\fP
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fP file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.sp
+\fB\-\-offline\fP
+.RS 4
+Prevents Cargo from accessing the network for any reason.
Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Publish the current package: +.sp +.if n .RS 4 +.nf +cargo publish +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-package\fP(1), \fBcargo\-login\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-run.1 b/src/etc/man/cargo-run.1 new file mode 100644 index 00000000000..da42337d243 --- /dev/null +++ b/src/etc/man/cargo-run.1 @@ -0,0 +1,376 @@ +'\" t +.\" Title: cargo-run +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-06-21 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-RUN" "1" "2019-06-21" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. 
LINKSTYLE blue R < >
+.\}
+.SH "NAME"
+cargo\-run \- Run the current package
+.SH "SYNOPSIS"
+.sp
+\fBcargo run [\fIOPTIONS\fP] [\-\- \fIARGS\fP]\fP
+.SH "DESCRIPTION"
+.sp
+Run a binary or example of the local package.
+.sp
+All the arguments following the two dashes (\fB\-\-\fP) are passed to the binary to
+run. If you\(cqre passing arguments to both Cargo and the binary, the ones after
+\fB\-\-\fP go to the binary, the ones before go to Cargo.
+.SH "OPTIONS"
+.SS "Package Selection"
+.sp
+By default, the package in the current working directory is selected. The \fB\-p\fP
+flag can be used to choose a different package in a workspace.
+.sp
+\fB\-p\fP \fISPEC\fP, \fB\-\-package\fP \fISPEC\fP
+.RS 4
+The package to run. See \fBcargo\-pkgid\fP(1) for
+the SPEC format.
+.RE
+.SS "Target Selection"
+.sp
+When no target selection options are given, \fBcargo run\fP will run the binary
+target. If there are multiple binary targets, you must pass a target flag to
+choose one. Alternatively, the \fBdefault\-run\fP field may be specified in the \fB[package]\fP
+section of \fBCargo.toml\fP to choose the name of the binary to run by default.
+.sp
+\fB\-\-bin\fP \fINAME\fP
+.RS 4
+Run the specified binary.
+.RE
+.sp
+\fB\-\-example\fP \fINAME\fP
+.RS 4
+Run the specified example.
+.RE
+.SS "Feature Selection"
+.sp
+When no feature options are given, the \fBdefault\fP feature is activated for
+every selected package.
+.sp
+\fB\-\-features\fP \fIFEATURES\fP
+.RS 4
+Space or comma separated list of features to activate. These features only
+apply to the current directory\(cqs package. Features of direct dependencies
+may be enabled with \fB<dep\-name>/<feature\-name>\fP syntax.
+.RE
+.sp
+\fB\-\-all\-features\fP
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fP
+.RS 4
+Do not activate the \fBdefault\fP feature of the current directory\(cqs
+package.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fP \fITRIPLE\fP
+.RS 4
+Run for the given architecture. The default is the host
+architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fP. Run \fBrustc \-\-print target\-list\fP for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-\-release\fP
+.RS 4
+Run optimized artifacts with the \fBrelease\fP profile. See the
+PROFILES section for details on how this affects profile selection.
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fP \fIDIRECTORY\fP
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the
+\fBbuild.target\-dir\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+Defaults
+to \fBtarget\fP in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fP, \fB\-\-verbose\fP
+.RS 4
+Use verbose output. May be specified twice for "very verbose" output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-q\fP, \fB\-\-quiet\fP
+.RS 4
+No output printed to stdout.
+.RE
+.sp
+\fB\-\-color\fP \fIWHEN\fP
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. 
IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. 
+.TS +allbox tab(:); +lt lt lt. +T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Build the local package and run its main target (assuming only one binary): +.sp +.if n .RS 4 +.nf +cargo run +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Run an example with extra arguments: +.sp +.if n .RS 4 +.nf +cargo run \-\-example exname \-\- \-\-exoption exarg1 exarg2 +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-build\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-rustc.1 b/src/etc/man/cargo-rustc.1 new file mode 100644 index 00000000000..f4cdd997f52 --- /dev/null +++ b/src/etc/man/cargo-rustc.1 @@ -0,0 +1,446 @@ +'\" t +.\" Title: cargo-rustc +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-RUSTC" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-rustc \- Compile the current package, and pass extra options to the compiler +.SH "SYNOPSIS" +.sp +\fBcargo rustc [\fIOPTIONS\fP] [\-\- \fIARGS\fP]\fP +.SH "DESCRIPTION" +.sp +The specified target for the current package (or package specified by \fB\-p\fP if +provided) will be compiled along with all of its dependencies. The specified +\fIARGS\fP will all be passed to the final compiler invocation, not any of the +dependencies. Note that the compiler will still unconditionally receive +arguments such as \fB\-L\fP, \fB\-\-extern\fP, and \fB\-\-crate\-type\fP, and the specified +\fIARGS\fP will simply be added to the compiler invocation. +.sp +See \c +.URL "https://doc.rust\-lang.org/rustc/index.html" "" " " +for documentation on rustc +flags. +.sp +This command requires that only one target is being compiled when additional +arguments are provided. If more than one target is available for the current +package the filters of \fB\-\-lib\fP, \fB\-\-bin\fP, etc, must be used to select which +target is compiled. +To pass flags to all compiler processes spawned by Cargo, use the \fBRUSTFLAGS\fP +environment variable or the \fBbuild.rustflags\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.SH "OPTIONS" +.SS "Package Selection" +.sp +By default, the package in the current working directory is selected. The \fB\-p\fP +flag can be used to choose a different package in a workspace. +.sp +\fB\-p\fP \fISPEC\fP, \fB\-\-package\fP \fISPEC\fP +.RS 4 +The package to build. See \fBcargo\-pkgid\fP(1) for +the SPEC format. 
+.RE
+.SS "Target Selection"
+.sp
+When no target selection options are given, \fBcargo rustc\fP will build all
+binary and library targets of the selected package.
+.sp
+Passing target selection flags will build only the
+specified targets.
+.sp
+\fB\-\-lib\fP
+.RS 4
+Build the package\(cqs library.
+.RE
+.sp
+\fB\-\-bin\fP \fINAME\fP...
+.RS 4
+Build the specified binary. This flag may be specified multiple times.
+.RE
+.sp
+\fB\-\-bins\fP
+.RS 4
+Build all binary targets.
+.RE
+.sp
+\fB\-\-example\fP \fINAME\fP...
+.RS 4
+Build the specified example. This flag may be specified multiple times.
+.RE
+.sp
+\fB\-\-examples\fP
+.RS 4
+Build all example targets.
+.RE
+.sp
+\fB\-\-test\fP \fINAME\fP...
+.RS 4
+Build the specified integration test. This flag may be specified multiple
+times.
+.RE
+.sp
+\fB\-\-tests\fP
+.RS 4
+Build all targets in test mode that have the \fBtest = true\fP manifest
+flag set. By default this includes the library and binaries built as
+unittests, and integration tests. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+unittest, and once as a dependency for binaries, integration tests, etc.).
+Targets may be enabled or disabled by setting the \fBtest\fP flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-bench\fP \fINAME\fP...
+.RS 4
+Build the specified benchmark. This flag may be specified multiple times.
+.RE
+.sp
+\fB\-\-benches\fP
+.RS 4
+Build all targets in benchmark mode that have the \fBbench = true\fP
+manifest flag set. By default this includes the library and binaries built
+as benchmarks, and bench targets. Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fP flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fP
+.RS 4
+Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins
+\-\-tests \-\-benches \-\-examples\fP.
+.RE
+.SS "Feature Selection"
+.sp
+When no feature options are given, the \fBdefault\fP feature is activated for
+every selected package.
+.sp
+\fB\-\-features\fP \fIFEATURES\fP
+.RS 4
+Space or comma separated list of features to activate. These features only
+apply to the current directory\(cqs package. Features of direct dependencies
+may be enabled with \fB<dep\-name>/<feature\-name>\fP syntax.
+.RE
+.sp
+\fB\-\-all\-features\fP
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fP
+.RS 4
+Do not activate the \fBdefault\fP feature of the current directory\(cqs
+package.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fP \fITRIPLE\fP
+.RS 4
+Build for the given architecture. The default is the host
+architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fP. Run \fBrustc \-\-print target\-list\fP for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-\-release\fP
+.RS 4
+Build optimized artifacts with the \fBrelease\fP profile. See the
+PROFILES section for details on how this affects profile selection.
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fP \fIDIRECTORY\fP
+.RS 4
+Directory for all generated artifacts and intermediate files.
May also be +specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the +\fBbuild.target\-dir\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults +to \fBtarget\fP in the root of the workspace. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... 
+.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. +.TS +allbox tab(:); +lt lt lt. +T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Check if your package (not including dependencies) uses unsafe code: +.sp +.if n .RS 4 +.nf +cargo rustc \-\-lib \-\- \-D unsafe\-code +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Try an experimental flag on the nightly compiler, such as this which prints +the size of every type: +.sp +.if n .RS 4 +.nf +cargo rustc \-\-lib \-\- \-Z print\-type\-sizes +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-build\fP(1), \fBrustc\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-rustdoc.1 b/src/etc/man/cargo-rustdoc.1 new file mode 100644 index 00000000000..4e1cc3693ec --- /dev/null +++ b/src/etc/man/cargo-rustdoc.1 @@ -0,0 +1,436 @@ +'\" t +.\" Title: cargo-rustdoc +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-RUSTDOC" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-rustdoc \- Build a package\(aqs documentation, using specified custom flags +.SH "SYNOPSIS" +.sp +\fBcargo rustdoc [\fIOPTIONS\fP] [\-\- \fIARGS\fP]\fP +.SH "DESCRIPTION" +.sp +The specified target for the current package (or package specified by \fB\-p\fP if +provided) will be documented with the specified \fIARGS\fP being passed to the +final rustdoc invocation. Dependencies will not be documented as part of this +command. Note that rustdoc will still unconditionally receive arguments such +as \fB\-L\fP, \fB\-\-extern\fP, and \fB\-\-crate\-type\fP, and the specified \fIARGS\fP will simply +be added to the rustdoc invocation. 
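+.sp
+For instance (an illustrative invocation; \fB\-\-document\-private\-items\fP is a
+standard rustdoc flag rather than anything Cargo\-specific), the following
+documents the package including its private items:
+.sp
+.if n .RS 4
+.nf
+cargo rustdoc \-\- \-\-document\-private\-items
+.fi
+.if n .RE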
+.sp +See \c +.URL "https://doc.rust\-lang.org/rustdoc/index.html" "" " " +for documentation on rustdoc +flags. +.sp +This command requires that only one target is being compiled when additional +arguments are provided. If more than one target is available for the current +package the filters of \fB\-\-lib\fP, \fB\-\-bin\fP, etc, must be used to select which +target is compiled. +To pass flags to all rustdoc processes spawned by Cargo, use the +\fBRUSTDOCFLAGS\fP environment variable or the \fBbuild.rustdocflags\fP configuration +option. +.SH "OPTIONS" +.SS "Documentation Options" +.sp +\fB\-\-open\fP +.RS 4 +Open the docs in a browser after building them. +.RE +.SS "Package Selection" +.sp +By default, the package in the current working directory is selected. The \fB\-p\fP +flag can be used to choose a different package in a workspace. +.sp +\fB\-p\fP \fISPEC\fP, \fB\-\-package\fP \fISPEC\fP +.RS 4 +The package to document. See \fBcargo\-pkgid\fP(1) for +the SPEC format. +.RE +.SS "Target Selection" +.sp +When no target selection options are given, \fBcargo rustdoc\fP will document all +binary and library targets of the selected package. The binary will be skipped +if its name is the same as the lib target. Binaries are skipped if they have +\fBrequired\-features\fP that are missing. +.sp +Passing target selection flags will document only the +specified targets. +.sp +\fB\-\-lib\fP +.RS 4 +Document the package\(cqs library. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Document the specified binary. This flag may be specified multiple times. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Document all binary targets. +.RE +.sp +\fB\-\-example\fP \fINAME\fP... +.RS 4 +Document the specified example. This flag may be specified multiple times. +.RE +.sp +\fB\-\-examples\fP +.RS 4 +Document all example targets. +.RE +.sp +\fB\-\-test\fP \fINAME\fP... +.RS 4 +Document the specified integration test. This flag may be specified multiple +times. +.RE +.sp +\fB\-\-tests\fP +.RS 4 +Document all targets in test mode that have the \fBtest = true\fP manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the \fBtest\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-bench\fP \fINAME\fP... +.RS 4 +Document the specified benchmark. This flag may be specified multiple times. +.RE +.sp +\fB\-\-benches\fP +.RS 4 +Document all targets in benchmark mode that have the \fBbench = true\fP +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +benchmark, and once as a dependency for binaries, benchmarks, etc.). +Targets may be enabled or disabled by setting the \fBbench\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-all\-targets\fP +.RS 4 +Document all targets. This is equivalent to specifying \fB\-\-lib \-\-bins +\-\-tests \-\-benches \-\-examples\fP. +.RE +.SS "Feature Selection" +.sp +When no feature options are given, the \fBdefault\fP feature is activated for +every selected package. +.sp +\fB\-\-features\fP \fIFEATURES\fP +.RS 4 +Space or comma separated list of features to activate. 
These features only
+apply to the current directory\(cqs package. Features of direct dependencies
+may be enabled with \fB<dep\-name>/<feature\-name>\fP syntax.
+.RE
+.sp
+\fB\-\-all\-features\fP
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fP
+.RS 4
+Do not activate the \fBdefault\fP feature of the current directory\(cqs
+package.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fP \fITRIPLE\fP
+.RS 4
+Document for the given architecture. The default is the host
+architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fP. Run \fBrustc \-\-print target\-list\fP for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-\-release\fP
+.RS 4
+Document optimized artifacts with the \fBrelease\fP profile. See the
+PROFILES section for details on how this affects profile selection.
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fP \fIDIRECTORY\fP
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the
+\fBbuild.target\-dir\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+Defaults
+to \fBtarget\fP in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fP, \fB\-\-verbose\fP
+.RS 4
+Use verbose output. May be specified twice for "very verbose" output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-q\fP, \fB\-\-quiet\fP
+.RS 4
+No output printed to stdout.
+.RE
+.sp
+\fB\-\-color\fP \fIWHEN\fP
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBauto\fP (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBalways\fP: Always display colors.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBnever\fP: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-\-message\-format\fP \fIFMT\fP
+.RS 4
+The output format for diagnostic messages. Valid values:
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBhuman\fP (default): Display in a human\-readable text format.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBjson\fP: Emit JSON messages to stdout.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBshort\fP: Emit shorter, human\-readable text messages.
+.RE
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fP \fIPATH\fP
+.RS 4
+Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current
+directory or any parent directory for the \fBCargo.toml\fP file.
+.RE
+.sp
+\fB\-\-frozen\fP, \fB\-\-locked\fP
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fP file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error.
The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. +.TS +allbox tab(:); +lt lt lt. +T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 
4.2 +.\} +Build documentation with custom CSS included from a given file: +.sp +.if n .RS 4 +.nf +cargo rustdoc \-\-lib \-\- \-\-extend\-css extra.css +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-doc\fP(1), \fBrustdoc\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-search.1 b/src/etc/man/cargo-search.1 new file mode 100644 index 00000000000..a789ac6c09a --- /dev/null +++ b/src/etc/man/cargo-search.1 @@ -0,0 +1,167 @@ +'\" t +.\" Title: cargo-search +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-01-23 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-SEARCH" "1" "2019-01-23" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-search \- Search packages in crates.io +.SH "SYNOPSIS" +.sp +\fBcargo search [\fIOPTIONS\fP] [\fIQUERY\fP...]\fP +.SH "DESCRIPTION" +.sp +This performs a textual search for crates on \c +.URL "https://crates.io" "" "." +The matching +crates will be displayed along with their description in TOML format suitable +for copying into a \fBCargo.toml\fP manifest. +.SH "OPTIONS" +.SS "Search Options" +.sp +\fB\-\-limit\fP \fILIMIT\fP +.RS 4 +Limit the number of results (default: 10, max: 100). +.RE +.sp +\fB\-\-index\fP \fIINDEX\fP +.RS 4 +The URL of the registry index to use. +.RE +.sp +\fB\-\-registry\fP \fIREGISTRY\fP +.RS 4 +Name of the registry to use. Registry names are defined in \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "." +If not specified, the default registry is used, which is defined by the +\fBregistry.default\fP config key which defaults to \fBcrates\-io\fP. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. 
+.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Search for a package from crates.io: +.sp +.if n .RS 4 +.nf +cargo search serde +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-install\fP(1), \fBcargo\-publish\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-test.1 b/src/etc/man/cargo-test.1 new file mode 100644 index 00000000000..c856952c45a --- /dev/null +++ b/src/etc/man/cargo-test.1 @@ -0,0 +1,607 @@ +'\" t +.\" Title: cargo-test +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-05-08 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-TEST" "1" "2019-05-08" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-test \- Execute unit and integration tests of a package +.SH "SYNOPSIS" +.sp +\fBcargo test [\fIOPTIONS\fP] [TESTNAME] [\-\- \fITEST\-OPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Compile and execute unit and integration tests. +.sp +The test filtering argument \fBTESTNAME\fP and all the arguments following the two +dashes (\fB\-\-\fP) are passed to the test binaries and thus to \fIlibtest\fP (rustc\(cqs +built in unit\-test and micro\-benchmarking framework). If you\(cqre passing +arguments to both Cargo and the binary, the ones after \fB\-\-\fP go to the binary, +the ones before go to Cargo. For details about libtest\(cqs arguments see the +output of \fBcargo test \(em \-\-help\fP. As an example, this will run all tests with +\fBfoo\fP in their name on 3 threads in parallel: +.sp +.if n .RS 4 +.nf +cargo test foo \-\- \-\-test\-threads 3 +.fi +.if n .RE +.sp +Tests are built with the \fB\-\-test\fP option to \fBrustc\fP which creates an +executable with a \fBmain\fP function that automatically runs all functions +annotated with the \fB#[test]\fP attribute in multiple threads. \fB#[bench]\fP +annotated functions will also be run with one iteration to verify that they +are functional. +.sp +The libtest harness may be disabled by setting \fBharness = false\fP in the target +manifest settings, in which case your code will need to provide its own \fBmain\fP +function to handle running tests. +.sp +Documentation tests are also run by default, which is handled by \fBrustdoc\fP. It +extracts code samples from documentation comments and executes them. See the +.URL "https://doc.rust\-lang.org/rustdoc/" "rustdoc book" " " +for more information on +writing doc tests. +.SH "OPTIONS" +.SS "Test Options" +.sp +\fB\-\-no\-run\fP +.RS 4 +Compile, but don\(cqt run tests. +.RE +.sp +\fB\-\-no\-fail\-fast\fP +.RS 4 +Run all tests regardless of failure. Without this flag, Cargo will exit +after the first executable fails. The Rust test harness will run all +tests within the executable to completion, this flag only applies to +the executable as a whole. +.RE +.SS "Package Selection" +.sp +By default, when no package selection options are given, the packages selected +depend on the current working directory. In the root of a virtual workspace, +all workspace members are selected (\fB\-\-all\fP is implied). Otherwise, only the +package in the current directory will be selected. The default packages may be +overridden with the \fBworkspace.default\-members\fP key in the root \fBCargo.toml\fP +manifest. 
+.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Test only the specified packages. See \fBcargo\-pkgid\fP(1) for the +SPEC format. This flag may be specified multiple times. +.RE +.sp +\fB\-\-all\fP +.RS 4 +Test all members in the workspace. +.RE +.sp +\fB\-\-exclude\fP \fISPEC\fP... +.RS 4 +Exclude the specified packages. Must be used in conjunction with the +\fB\-\-all\fP flag. This flag may be specified multiple times. +.RE +.SS "Target Selection" +.sp +When no target selection options are given, \fBcargo test\fP will build the +following targets of the selected packages: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +lib — used to link with binaries, examples, integration tests, and doc tests +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +bins (only if integration tests are built and required features are +available) +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +examples — to ensure they compile +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +lib as a unit test +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +bins as unit tests +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +integration tests +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +doc tests for the lib target +.RE +.sp +The default behavior can be changed by setting the \fBtest\fP flag for the target +in the manifest settings. Setting examples to \fBtest = true\fP will build and run +the example as a test. Setting targets to \fBtest = false\fP will stop them from +being tested by default. Target selection options that take a target by name +ignore the \fBtest\fP flag and will always test the given target. +.sp +Doc tests for libraries may be disabled by setting \fBdoctest = false\fP for the +library in the manifest. +.sp +Passing target selection flags will test only the +specified targets. +.sp +\fB\-\-lib\fP +.RS 4 +Test the package\(cqs library. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Test the specified binary. This flag may be specified multiple times. +.RE +.sp +\fB\-\-bins\fP +.RS 4 +Test all binary targets. +.RE +.sp +\fB\-\-example\fP \fINAME\fP... +.RS 4 +Test the specified example. This flag may be specified multiple times. +.RE +.sp +\fB\-\-examples\fP +.RS 4 +Test all example targets. +.RE +.sp +\fB\-\-test\fP \fINAME\fP... +.RS 4 +Test the specified integration test. This flag may be specified multiple +times. +.RE +.sp +\fB\-\-tests\fP +.RS 4 +Test all targets in test mode that have the \fBtest = true\fP manifest +flag set. By default this includes the library and binaries built as +unittests, and integration tests. Be aware that this will also build any +required dependencies, so the lib target may be built twice (once as a +unittest, and once as a dependency for binaries, integration tests, etc.). +Targets may be enabled or disabled by setting the \fBtest\fP flag in the +manifest settings for the target. +.RE +.sp +\fB\-\-bench\fP \fINAME\fP... +.RS 4 +Test the specified benchmark. This flag may be specified multiple times. +.RE +.sp +\fB\-\-benches\fP +.RS 4 +Test all targets in benchmark mode that have the \fBbench = true\fP +manifest flag set. By default this includes the library and binaries built +as benchmarks, and bench targets. 
Be aware that this will also build any
+required dependencies, so the lib target may be built twice (once as a
+benchmark, and once as a dependency for binaries, benchmarks, etc.).
+Targets may be enabled or disabled by setting the \fBbench\fP flag in the
+manifest settings for the target.
+.RE
+.sp
+\fB\-\-all\-targets\fP
+.RS 4
+Test all targets. This is equivalent to specifying \fB\-\-lib \-\-bins
+\-\-tests \-\-benches \-\-examples\fP.
+.RE
+.sp
+\fB\-\-doc\fP
+.RS 4
+Test only the library\(cqs documentation. This cannot be mixed with other
+target options.
+.RE
+.SS "Feature Selection"
+.sp
+When no feature options are given, the \fBdefault\fP feature is activated for
+every selected package.
+.sp
+\fB\-\-features\fP \fIFEATURES\fP
+.RS 4
+Space or comma separated list of features to activate. These features only
+apply to the current directory\(cqs package. Features of direct dependencies
+may be enabled with \fB<dep\-name>/<feature\-name>\fP syntax.
+.RE
+.sp
+\fB\-\-all\-features\fP
+.RS 4
+Activate all available features of all selected packages.
+.RE
+.sp
+\fB\-\-no\-default\-features\fP
+.RS 4
+Do not activate the \fBdefault\fP feature of the current directory\(cqs
+package.
+.RE
+.SS "Compilation Options"
+.sp
+\fB\-\-target\fP \fITRIPLE\fP
+.RS 4
+Test for the given architecture. The default is the host
+architecture. The general format of the triple is
+\fB<arch><sub>\-<vendor>\-<sys>\-<abi>\fP. Run \fBrustc \-\-print target\-list\fP for a
+list of supported targets.
+.sp
+This may also be specified with the \fBbuild.target\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-\-release\fP
+.RS 4
+Test optimized artifacts with the \fBrelease\fP profile. See the
+PROFILES section for details on how this affects profile selection.
+.RE
+.SS "Output Options"
+.sp
+\fB\-\-target\-dir\fP \fIDIRECTORY\fP
+.RS 4
+Directory for all generated artifacts and intermediate files. May also be
+specified with the \fBCARGO_TARGET_DIR\fP environment variable, or the
+\fBbuild.target\-dir\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+Defaults
+to \fBtarget\fP in the root of the workspace.
+.RE
+.SS "Display Options"
+.sp
+By default the Rust test harness hides output from test execution to keep
+results readable. Test output can be recovered (e.g., for debugging) by passing
+\fB\-\-nocapture\fP to the test binaries:
+.sp
+.if n .RS 4
+.nf
+cargo test \-\- \-\-nocapture
+.fi
+.if n .RE
+.sp
+\fB\-v\fP, \fB\-\-verbose\fP
+.RS 4
+Use verbose output. May be specified twice for "very verbose" output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-q\fP, \fB\-\-quiet\fP
+.RS 4
+No output printed to stdout.
+.RE
+.sp
+\fB\-\-color\fP \fIWHEN\fP
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBauto\fP (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBalways\fP: Always display colors.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBnever\fP: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
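+.sp
+For example, one plausible invocation that forces plain output while
+redirecting test results to a (hypothetical) \fBtest.log\fP file:
+.sp
+.if n .RS 4
+.nf
+cargo test \-\-color never > test.log
+.fi
+.if n .RE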
+.RE +.sp +\fB\-\-message\-format\fP \fIFMT\fP +.RS 4 +The output format for diagnostic messages. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBhuman\fP (default): Display in a human\-readable text format. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBjson\fP: Emit JSON messages to stdout. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBshort\fP: Emit shorter, human\-readable text messages. +.RE +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SS "Miscellaneous Options" +.sp +The \fB\-\-jobs\fP argument affects the building of the test executable but does not +affect how many threads are used when running the tests. The Rust test harness +includes an option to control the number of threads used: +.sp +.if n .RS 4 +.nf +cargo test \-j 2 \-\- \-\-test\-threads=2 +.fi +.if n .RE +.sp +\fB\-j\fP \fIN\fP, \fB\-\-jobs\fP \fIN\fP +.RS 4 +Number of parallel jobs to run. May also be specified with the +\fBbuild.jobs\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +Defaults to +the number of CPUs. +.RE +.SH "PROFILES" +.sp +Profiles may be used to configure compiler options such as optimization levels +and debug settings. See +\c +.URL "https://doc.rust\-lang.org/cargo/reference/manifest.html#the\-profile\-sections" "the reference" +for more details. +.sp +Profile selection depends on the target and crate being built. By default the +\fBdev\fP or \fBtest\fP profiles are used. If the \fB\-\-release\fP flag is given, then the +\fBrelease\fP or \fBbench\fP profiles are used. +.TS +allbox tab(:); +lt lt lt. 
+T{ +.sp +Target +T}:T{ +.sp +Default Profile +T}:T{ +.sp +\fB\-\-release\fP Profile +T} +T{ +.sp +lib, bin, example +T}:T{ +.sp +\fBdev\fP +T}:T{ +.sp +\fBrelease\fP +T} +T{ +.sp +test, bench, or any target +.br +in "test" or "bench" mode +T}:T{ +.sp +\fBtest\fP +T}:T{ +.sp +\fBbench\fP +T} +.TE +.sp +.sp +Dependencies use the \fBdev\fP/\fBrelease\fP profiles. +.sp +Unit tests are separate executable artifacts which use the \fBtest\fP/\fBbench\fP +profiles. Example targets are built the same as with \fBcargo build\fP (using the +\fBdev\fP/\fBrelease\fP profiles) unless you are building them with the test harness +(by setting \fBtest = true\fP in the manifest or using the \fB\-\-example\fP flag) in +which case they use the \fBtest\fP/\fBbench\fP profiles. Library targets are built +with the \fBdev\fP/\fBrelease\fP profiles when linked to an integration test, binary, +or doctest. +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Execute all the unit and integration tests of the current package: +.sp +.if n .RS 4 +.nf +cargo test +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Run only a specific test within a specific integration test: +.sp +.if n .RS 4 +.nf +cargo test \-\-test int_test_name \-\- modname::test_name +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-bench\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-uninstall.1 b/src/etc/man/cargo-uninstall.1 new file mode 100644 index 00000000000..36ea8a80656 --- /dev/null +++ b/src/etc/man/cargo-uninstall.1 @@ -0,0 +1,223 @@ +'\" t +.\" Title: cargo-uninstall +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2018-12-20 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-UNINSTALL" "1" "2018-12-20" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-uninstall \- Remove a Rust binary +.SH "SYNOPSIS" +.sp +\fBcargo uninstall [\fIOPTIONS\fP] [\fISPEC\fP...]\fP +.SH "DESCRIPTION" +.sp +This command removes a package installed with \fBcargo\-install\fP(1). The \fISPEC\fP +argument is a package ID specification of the package to remove (see +\fBcargo\-pkgid\fP(1)). +.sp +By default all binaries are removed for a crate but the \fB\-\-bin\fP and +\fB\-\-example\fP flags can be used to only remove particular binaries. +.sp +The installation root is determined, in order of precedence: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fB\-\-root\fP option +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBCARGO_INSTALL_ROOT\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBinstall.root\fP Cargo \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "" +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. 
IP \(bu 2.3 +.\} +\fBCARGO_HOME\fP environment variable +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fB$HOME/.cargo\fP +.RE +.SH "OPTIONS" +.SS "Install Options" +.sp +\fB\-p\fP, \fB\-\-package\fP \fISPEC\fP... +.RS 4 +Package to uninstall. +.RE +.sp +\fB\-\-bin\fP \fINAME\fP... +.RS 4 +Only uninstall the binary \fINAME\fP. +.RE +.sp +\fB\-\-root\fP \fIDIR\fP +.RS 4 +Directory to uninstall packages from. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Uninstall a previously installed package. +.sp +.if n .RS 4 +.nf +cargo uninstall ripgrep +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-install\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-update.1 b/src/etc/man/cargo-update.1 new file mode 100644 index 00000000000..835e6c31412 --- /dev/null +++ b/src/etc/man/cargo-update.1 @@ -0,0 +1,249 @@ +'\" t +.\" Title: cargo-update +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-UPDATE" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-update \- Update dependencies as recorded in the local lock file +.SH "SYNOPSIS" +.sp +\fBcargo update [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +This command will update dependencies in the \fBCargo.lock\fP file to the latest +version. It requires that the \fBCargo.lock\fP file already exists as generated +by commands such as \fBcargo\-build\fP(1) or \fBcargo\-generate\-lockfile\fP(1). +.SH "OPTIONS" +.SS "Update Options" +.sp +\fB\-p\fP \fISPEC\fP..., \fB\-\-package\fP \fISPEC\fP... 
+.RS 4 +Update only the specified packages. This flag may be specified +multiple times. See \fBcargo\-pkgid\fP(1) for the SPEC format. +.sp +If packages are specified with the \fB\-p\fP flag, then a conservative update of +the lockfile will be performed. This means that only the dependency specified +by SPEC will be updated. Its transitive dependencies will be updated only if +SPEC cannot be updated without updating dependencies. All other dependencies +will remain locked at their currently recorded versions. +.sp +If \fB\-p\fP is not specified, all dependencies are updated. +.RE +.sp +\fB\-\-aggressive\fP +.RS 4 +When used with \fB\-p\fP, dependencies of \fISPEC\fP are forced to update as well. +Cannot be used with \fB\-\-precise\fP. +.RE +.sp +\fB\-\-precise\fP \fIPRECISE\fP +.RS 4 +When used with \fB\-p\fP, allows you to specify a specific version number to +set the package to. If the package comes from a git repository, this can +be a git revision (such as a SHA hash or tag). +.RE +.sp +\fB\-\-dry\-run\fP +.RS 4 +Displays what would be updated, but doesn\(cqt actually write the lockfile. +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. 
+See the \fBcargo\-fetch\fP(1) command to download dependencies before going
+offline.
+.sp
+May also be specified with the \fBnet.offline\fP \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.SS "Common Options"
+.sp
+\fB\-h\fP, \fB\-\-help\fP
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fP \fIFLAG\fP...
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for
+details.
+.RE
+.SH "ENVIRONMENT"
+.sp
+See \c
+.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " "
+for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+0
+.RS 4
+Cargo succeeded.
+.RE
+.sp
+101
+.RS 4
+Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+.ie n \{\
+\h'-04' 1.\h'+01'\c
+.\}
+.el \{\
+. sp -1
+. IP " 1." 4.2
+.\}
+Update all dependencies in the lockfile:
+.sp
+.if n .RS 4
+.nf
+cargo update
+.fi
+.if n .RE
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04' 2.\h'+01'\c
+.\}
+.el \{\
+. sp -1
+. IP " 2." 4.2
+.\}
+Update only specific dependencies:
+.sp
+.if n .RS 4
+.nf
+cargo update \-p foo \-p bar
+.fi
+.if n .RE
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04' 3.\h'+01'\c
+.\}
+.el \{\
+. sp -1
+. IP " 3." 4.2
+.\}
+Set a specific dependency to a specific version:
+.sp
+.if n .RS 4
+.nf
+cargo update \-p foo \-\-precise 1.2.3
+.fi
+.if n .RE
+.RE
+.SH "SEE ALSO"
+.sp
+\fBcargo\fP(1), \fBcargo\-generate\-lockfile\fP(1)
\ No newline at end of file
diff --git a/src/etc/man/cargo-vendor.1 b/src/etc/man/cargo-vendor.1
new file mode 100644
index 00000000000..3937cc9495e
--- /dev/null
+++ b/src/etc/man/cargo-vendor.1
@@ -0,0 +1,223 @@
+'\" t
+.\" Title: cargo-vendor
+.\" Author: [see the "AUTHOR(S)" section]
+.\" Generator: Asciidoctor 2.0.8
+.\" Date: 2019-04-29
+.\" Manual: \ \&
+.\" Source: \ \&
+.\" Language: English
+.\"
+.TH "CARGO\-VENDOR" "1" "2019-04-29" "\ \&" "\ \&"
+.ie \n(.g .ds Aq \(aq
+.el .ds Aq '
+.ss \n[.ss] 0
+.nh
+.ad l
+.de URL
+\fI\\$2\fP <\\$1>\\$3
+..
+.als MTO URL
+.if \n[.g] \{\
+. mso www.tmac
+. am URL
+. ad l
+. .
+. am MTO
+. ad l
+. .
+. LINKSTYLE blue R < >
+.\}
+.SH "NAME"
+cargo\-vendor \- Vendor all dependencies locally
+.SH "SYNOPSIS"
+.sp
+\fBcargo vendor [\fIOPTIONS\fP] [\fIPATH\fP]\fP
+.SH "DESCRIPTION"
+.sp
+This cargo subcommand will vendor all crates.io and git dependencies for a
+project into the specified directory at \fB<path>\fP. After this command completes,
+the vendor directory specified by \fB<path>\fP will contain all remote sources from
+the dependencies specified. Additional manifests beyond the default one can be
+specified with the \fB\-s\fP option.
+.sp
+The \fBcargo vendor\fP command will also print out the configuration necessary
+to use the vendored sources, which you will need to add to \fB.cargo/config\fP.
+.SH "OPTIONS"
+.SS "Vendor Options"
+.sp
+\fB\-s\fP \fIMANIFEST\fP, \fB\-\-sync\fP \fIMANIFEST\fP
+.RS 4
+Specify extra \fBCargo.toml\fP manifests of workspaces which should also be
+vendored and synced to the output.
+.RE
+.sp
+\fB\-\-no\-delete\fP
+.RS 4
+Don\(cqt delete the "vendor" directory when vendoring, but rather keep all
+existing contents of the vendor directory.
+.RE
+.sp
+\fB\-\-respect\-source\-config\fP
+.RS 4
+Instead of ignoring \fB[source]\fP configuration by default in \fB.cargo/config\fP,
+read it and use it when downloading crates from crates.io, for example.
+.RE
+.SS "Manifest Options"
+.sp
+\fB\-\-manifest\-path\fP \fIPATH\fP
+.RS 4
+Path to the \fBCargo.toml\fP file.
By default, Cargo searches in the current
+directory or any parent directory for the \fBCargo.toml\fP file.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fP, \fB\-\-verbose\fP
+.RS 4
+Use verbose output. May be specified twice for "very verbose" output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-q\fP, \fB\-\-quiet\fP
+.RS 4
+No output printed to stdout.
+.RE
+.sp
+\fB\-\-color\fP \fIWHEN\fP
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBauto\fP (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBalways\fP: Always display colors.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBnever\fP: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.SS "Common Options"
+.sp
+\fB\-h\fP, \fB\-\-help\fP
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fP \fIFLAG\fP...
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for
+details.
+.RE
+.sp
+\fB\-\-frozen\fP, \fB\-\-locked\fP
+.RS 4
+Either of these flags requires that the \fBCargo.lock\fP file is
+up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will
+exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from
+attempting to access the network to determine if it is out\-of\-date.
+.sp
+These may be used in environments where you want to assert that the
+\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network
+access.
+.RE
+.SH "ENVIRONMENT"
+.sp
+See \c
+.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " "
+for
+details on environment variables that Cargo reads.
+.SH "EXIT STATUS"
+.sp
+0
+.RS 4
+Cargo succeeded.
+.RE
+.sp
+101
+.RS 4
+Cargo failed to complete.
+.RE
+.SH "EXAMPLES"
+.sp
+.RS 4
+.ie n \{\
+\h'-04' 1.\h'+01'\c
+.\}
+.el \{\
+. sp -1
+. IP " 1." 4.2
+.\}
+Vendor all dependencies into a local "vendor" folder:
+.sp
+.if n .RS 4
+.nf
+cargo vendor
+.fi
+.if n .RE
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04' 2.\h'+01'\c
+.\}
+.el \{\
+. sp -1
+. IP " 2." 4.2
+.\}
+Vendor all dependencies into a local "third\-party/vendor" folder:
+.sp
+.if n .RS 4
+.nf
+cargo vendor third\-party/vendor
+.fi
+.if n .RE
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04' 3.\h'+01'\c
+.\}
+.el \{\
+. sp -1
+. IP " 3."
4.2 +.\} +Vendor the current workspace as well as another to "vendor" +.sp +.if n .RS 4 +.nf +cargo vendor \-s ../path/to/Cargo.toml +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-verify-project.1 b/src/etc/man/cargo-verify-project.1 new file mode 100644 index 00000000000..a395013406a --- /dev/null +++ b/src/etc/man/cargo-verify-project.1 @@ -0,0 +1,195 @@ +'\" t +.\" Title: cargo-verify-project +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-04-16 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-VERIFY\-PROJECT" "1" "2019-04-16" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-verify\-project \- Check correctness of crate manifest +.SH "SYNOPSIS" +.sp +\fBcargo verify\-project [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +This command will parse the local manifest and check its validity. It emits a +JSON object with the result. A successful validation will display: +.sp +.if n .RS 4 +.nf +{"success":"true"} +.fi +.if n .RE +.sp +An invalid workspace will display: +.sp +.if n .RS 4 +.nf +{"invalid":"human\-readable error message"} +.fi +.if n .RE +.SH "OPTIONS" +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-manifest\-path\fP \fIPATH\fP +.RS 4 +Path to the \fBCargo.toml\fP file. By default, Cargo searches in the current +directory or any parent directory for the \fBCargo.toml\fP file. +.RE +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. 
With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +The workspace is OK. +.RE +.sp +1 +.RS 4 +The workspace is invalid. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Check the current workspace for errors: +.sp +.if n .RS 4 +.nf +cargo verify\-project +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-package\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-version.1 b/src/etc/man/cargo-version.1 new file mode 100644 index 00000000000..4527d14900f --- /dev/null +++ b/src/etc/man/cargo-version.1 @@ -0,0 +1,99 @@ +'\" t +.\" Title: cargo-version +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2018-12-20 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-VERSION" "1" "2018-12-20" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo\-version \- Show version information +.SH "SYNOPSIS" +.sp +\fBcargo version [\fIOPTIONS\fP]\fP +.SH "DESCRIPTION" +.sp +Displays the version of Cargo. +.SH "OPTIONS" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Display additional version information. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Display the version: +.sp +.if n .RS 4 +.nf +cargo version +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +The version is also available via flags: +.sp +.if n .RS 4 +.nf +cargo \-\-version +cargo \-V +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 3.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 3." 4.2 +.\} +Display extra version information: +.sp +.if n .RS 4 +.nf +cargo \-Vv +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo-yank.1 b/src/etc/man/cargo-yank.1 new file mode 100644 index 00000000000..c3a637c7ae4 --- /dev/null +++ b/src/etc/man/cargo-yank.1 @@ -0,0 +1,194 @@ +'\" t +.\" Title: cargo-yank +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-01-23 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO\-YANK" "1" "2019-01-23" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. 
LINKSTYLE blue R < >
+.\}
+.SH "NAME"
+cargo\-yank \- Remove a pushed crate from the index
+.SH "SYNOPSIS"
+.sp
+\fBcargo yank [\fIOPTIONS\fP] \-\-vers \fIVERSION\fP [\fICRATE\fP]\fP
+.SH "DESCRIPTION"
+.sp
+The yank command removes a previously published crate\(cqs version from the
+server\(cqs index. This command does not delete any data, and the crate will
+still be available for download via the registry\(cqs download link.
+.sp
+Note that existing crates locked to a yanked version will still be able to
+download the yanked version to use it. Cargo will, however, not allow any new
+crates to be locked to any yanked version.
+.sp
+This command requires you to be authenticated with either the \fB\-\-token\fP option
+or using \fBcargo\-login\fP(1).
+.sp
+If the crate name is not specified, it will use the package name from the
+current directory.
+.SH "OPTIONS"
+.SS "Yank Options"
+.sp
+\fB\-\-vers\fP \fIVERSION\fP
+.RS 4
+The version to yank or un\-yank.
+.RE
+.sp
+\fB\-\-undo\fP
+.RS 4
+Undo a yank, putting a version back into the index.
+.RE
+.sp
+\fB\-\-token\fP \fITOKEN\fP
+.RS 4
+API token to use when authenticating. This overrides the token stored in
+the credentials file (which is created by \fBcargo\-login\fP(1)).
+.sp
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config" " "
+environment variables can be
+used to override the tokens stored in the credentials file. The token for
+crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fP environment
+variable. Tokens for other registries may be specified with environment
+variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fP where \fBNAME\fP is the name
+of the registry in all capital letters.
+.RE
+.sp
+\fB\-\-index\fP \fIINDEX\fP
+.RS 4
+The URL of the registry index to use.
+.RE
+.sp
+\fB\-\-registry\fP \fIREGISTRY\fP
+.RS 4
+Name of the registry to use. Registry names are defined in \c
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "Cargo config files" "."
+If not specified, the default registry is used, which is defined by the
+\fBregistry.default\fP config key which defaults to \fBcrates\-io\fP.
+.RE
+.SS "Display Options"
+.sp
+\fB\-v\fP, \fB\-\-verbose\fP
+.RS 4
+Use verbose output. May be specified twice for "very verbose" output which
+includes extra output such as dependency warnings and build script output.
+May also be specified with the \fBterm.verbose\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.sp
+\fB\-q\fP, \fB\-\-quiet\fP
+.RS 4
+No output printed to stdout.
+.RE
+.sp
+\fB\-\-color\fP \fIWHEN\fP
+.RS 4
+Control when colored output is used. Valid values:
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBauto\fP (default): Automatically detect if color support is available on the
+terminal.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBalways\fP: Always display colors.
+.RE
+.sp
+.RS 4
+.ie n \{\
+\h'-04'\(bu\h'+03'\c
+.\}
+.el \{\
+. sp -1
+. IP \(bu 2.3
+.\}
+\fBnever\fP: Never display colors.
+.RE
+.sp
+May also be specified with the \fBterm.color\fP
+.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "."
+.RE
+.SS "Common Options"
+.sp
+\fB\-h\fP, \fB\-\-help\fP
+.RS 4
+Prints help information.
+.RE
+.sp
+\fB\-Z\fP \fIFLAG\fP...
+.RS 4
+Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for
+details.
+.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Yank a crate from the index: +.sp +.if n .RS 4 +.nf +cargo yank \-\-vers 1.0.7 foo +.fi +.if n .RE +.RE +.SH "SEE ALSO" +.sp +\fBcargo\fP(1), \fBcargo\-login\fP(1), \fBcargo\-publish\fP(1) \ No newline at end of file diff --git a/src/etc/man/cargo.1 b/src/etc/man/cargo.1 new file mode 100644 index 00000000000..757510051b2 --- /dev/null +++ b/src/etc/man/cargo.1 @@ -0,0 +1,490 @@ +'\" t +.\" Title: cargo +.\" Author: [see the "AUTHOR(S)" section] +.\" Generator: Asciidoctor 1.5.8 +.\" Date: 2019-05-20 +.\" Manual: \ \& +.\" Source: \ \& +.\" Language: English +.\" +.TH "CARGO" "1" "2019-05-20" "\ \&" "\ \&" +.ie \n(.g .ds Aq \(aq +.el .ds Aq ' +.ss \n[.ss] 0 +.nh +.ad l +.de URL +\fI\\$2\fP <\\$1>\\$3 +.. +.als MTO URL +.if \n[.g] \{\ +. mso www.tmac +. am URL +. ad l +. . +. am MTO +. ad l +. . +. LINKSTYLE blue R < > +.\} +.SH "NAME" +cargo \- The Rust package manager +.SH "SYNOPSIS" +.sp +\fBcargo [\fIOPTIONS\fP] \fICOMMAND\fP [\fIARGS\fP]\fP +.br +\fBcargo [\fIOPTIONS\fP] \-\-version\fP +.br +\fBcargo [\fIOPTIONS\fP] \-\-list\fP +.br +\fBcargo [\fIOPTIONS\fP] \-\-help\fP +.br +\fBcargo [\fIOPTIONS\fP] \-\-explain \fICODE\fP\fP +.SH "DESCRIPTION" +.sp +This program is a package manager and build tool for the Rust language, +available at \c +.URL "https://rust\-lang.org" "" "." +.SH "COMMANDS" +.SS "Build Commands" +.sp +\fBcargo\-bench\fP(1) +.RS 4 +Execute benchmarks of a package. +.RE +.sp +\fBcargo\-build\fP(1) +.RS 4 +Compile a package. +.RE +.sp +\fBcargo\-check\fP(1) +.RS 4 +Check a local package and all of its dependencies for errors. +.RE +.sp +\fBcargo\-clean\fP(1) +.RS 4 +Remove artifacts that Cargo has generated in the past. +.RE +.sp +\fBcargo\-doc\fP(1) +.RS 4 +Build a package\(cqs documentation. +.RE +.sp +\fBcargo\-fetch\fP(1) +.RS 4 +Fetch dependencies of a package from the network. +.RE +.sp +\fBcargo\-fix\fP(1) +.RS 4 +Automatically fix lint warnings reported by rustc. +.RE +.sp +\fBcargo\-run\fP(1) +.RS 4 +Run a binary or example of the local package. +.RE +.sp +\fBcargo\-rustc\fP(1) +.RS 4 +Compile a package, and pass extra options to the compiler. +.RE +.sp +\fBcargo\-rustdoc\fP(1) +.RS 4 +Build a package\(cqs documentation, using specified custom flags. +.RE +.sp +\fBcargo\-test\fP(1) +.RS 4 +Execute unit and integration tests of a package. +.RE +.SS "Manifest Commands" +.sp +\fBcargo\-generate\-lockfile\fP(1) +.RS 4 +Generate \fBCargo.lock\fP for a project. +.RE +.sp +\fBcargo\-locate\-project\fP(1) +.RS 4 +Print a JSON representation of a \fBCargo.toml\fP file\(cqs location. +.RE +.sp +\fBcargo\-metadata\fP(1) +.RS 4 +Output the resolved dependencies of a package, the concrete used versions +including overrides, in machine\-readable format. +.RE +.sp +\fBcargo\-pkgid\fP(1) +.RS 4 +Print a fully qualified package specification. +.RE +.sp +\fBcargo\-update\fP(1) +.RS 4 +Update dependencies as recorded in the local lock file. +.RE +.sp +\fBcargo\-verify\-project\fP(1) +.RS 4 +Check correctness of crate manifest. +.RE +.SS "Package Commands" +.sp +\fBcargo\-init\fP(1) +.RS 4 +Create a new Cargo package in an existing directory. 
+.RE +.sp +\fBcargo\-install\fP(1) +.RS 4 +Build and install a Rust binary. +.RE +.sp +\fBcargo\-new\fP(1) +.RS 4 +Create a new Cargo package. +.RE +.sp +\fBcargo\-search\fP(1) +.RS 4 +Search packages in crates.io. +.RE +.sp +\fBcargo\-uninstall\fP(1) +.RS 4 +Remove a Rust binary. +.RE +.SS "Publishing Commands" +.sp +\fBcargo\-login\fP(1) +.RS 4 +Save an API token from the registry locally. +.RE +.sp +\fBcargo\-owner\fP(1) +.RS 4 +Manage the owners of a crate on the registry. +.RE +.sp +\fBcargo\-package\fP(1) +.RS 4 +Assemble the local package into a distributable tarball. +.RE +.sp +\fBcargo\-publish\fP(1) +.RS 4 +Upload a package to the registry. +.RE +.sp +\fBcargo\-yank\fP(1) +.RS 4 +Remove a pushed crate from the index. +.RE +.SS "General Commands" +.sp +\fBcargo\-help\fP(1) +.RS 4 +Display help information about Cargo. +.RE +.sp +\fBcargo\-version\fP(1) +.RS 4 +Show version information. +.RE +.SH "OPTIONS" +.SS "Special Options" +.sp +\fB\-V\fP, \fB\-\-version\fP +.RS 4 +Print version info and exit. If used with \fB\-\-verbose\fP, prints extra +information. +.RE +.sp +\fB\-\-list\fP +.RS 4 +List all installed Cargo subcommands. If used with \fB\-\-verbose\fP, prints +extra information. +.RE +.sp +\fB\-\-explain \fICODE\fP\fP +.RS 4 +Run \fBrustc \-\-explain CODE\fP which will print out a detailed explanation of +an error message (for example, \fBE0004\fP). +.RE +.SS "Display Options" +.sp +\fB\-v\fP, \fB\-\-verbose\fP +.RS 4 +Use verbose output. May be specified twice for "very verbose" output which +includes extra output such as dependency warnings and build script output. +May also be specified with the \fBterm.verbose\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.sp +\fB\-q\fP, \fB\-\-quiet\fP +.RS 4 +No output printed to stdout. +.RE +.sp +\fB\-\-color\fP \fIWHEN\fP +.RS 4 +Control when colored output is used. Valid values: +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBauto\fP (default): Automatically detect if color support is available on the +terminal. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBalways\fP: Always display colors. +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04'\(bu\h'+03'\c +.\} +.el \{\ +. sp -1 +. IP \(bu 2.3 +.\} +\fBnever\fP: Never display colors. +.RE +.sp +May also be specified with the \fBterm.color\fP +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Manifest Options" +.sp +\fB\-\-frozen\fP, \fB\-\-locked\fP +.RS 4 +Either of these flags requires that the \fBCargo.lock\fP file is +up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will +exit with an error. The \fB\-\-frozen\fP flag also prevents Cargo from +attempting to access the network to determine if it is out\-of\-date. +.sp +These may be used in environments where you want to assert that the +\fBCargo.lock\fP file is up\-to\-date (such as a CI build) or want to avoid network +access. +.RE +.sp +\fB\-\-offline\fP +.RS 4 +Prevents Cargo from accessing the network for any reason. Without this +flag, Cargo will stop with an error if it needs to access the network and +the network is not available. With this flag, Cargo will attempt to +proceed without the network if possible. +.sp +Beware that this may result in different dependency resolution than online +mode. 
Cargo will restrict itself to crates that are downloaded locally, even +if there might be a newer version as indicated in the local copy of the index. +See the \fBcargo\-fetch\fP(1) command to download dependencies before going +offline. +.sp +May also be specified with the \fBnet.offline\fP \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "config value" "." +.RE +.SS "Common Options" +.sp +\fB\-h\fP, \fB\-\-help\fP +.RS 4 +Prints help information. +.RE +.sp +\fB\-Z\fP \fIFLAG\fP... +.RS 4 +Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fP for +details. +.RE +.SH "ENVIRONMENT" +.sp +See \c +.URL "https://doc.rust\-lang.org/cargo/reference/environment\-variables.html" "the reference" " " +for +details on environment variables that Cargo reads. +.SH "EXIT STATUS" +.sp +0 +.RS 4 +Cargo succeeded. +.RE +.sp +101 +.RS 4 +Cargo failed to complete. +.RE +.SH "FILES" +.sp +\fB~/.cargo/\fP +.RS 4 +Default location for Cargo\(cqs "home" directory where it stores various +files. The location can be changed with the \fBCARGO_HOME\fP environment +variable. +.RE +.sp +\fB$CARGO_HOME/bin/\fP +.RS 4 +Binaries installed by \fBcargo\-install\fP(1) will be located here. If using +rustup, executables distributed with Rust are also located here. +.RE +.sp +\fB$CARGO_HOME/config\fP +.RS 4 +The global configuration file. See \c +.URL "https://doc.rust\-lang.org/cargo/reference/config.html" "the reference" +for more information about configuration files. +.RE +.sp +\fB.cargo/config\fP +.RS 4 +Cargo automatically searches for a file named \fB.cargo/config\fP in the +current directory, and all parent directories. These configuration files +will be merged with the global configuration file. +.RE +.sp +\fB$CARGO_HOME/credentials\fP +.RS 4 +Private authentication information for logging in to a registry. +.RE +.sp +\fB$CARGO_HOME/registry/\fP +.RS 4 +This directory contains cached downloads of the registry index and any +downloaded dependencies. +.RE +.sp +\fB$CARGO_HOME/git/\fP +.RS 4 +This directory contains cached downloads of git dependencies. +.RE +.SH "EXAMPLES" +.sp +.RS 4 +.ie n \{\ +\h'-04' 1.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 1." 4.2 +.\} +Build a local package and all of its dependencies: +.sp +.if n .RS 4 +.nf +cargo build +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 2.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 2." 4.2 +.\} +Build a package with optimizations: +.sp +.if n .RS 4 +.nf +cargo build \-\-release +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 3.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 3." 4.2 +.\} +Run tests for a cross\-compiled target: +.sp +.if n .RS 4 +.nf +cargo test \-\-target i686\-unknown\-linux\-gnu +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 4.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 4." 4.2 +.\} +Create a new package that builds an executable: +.sp +.if n .RS 4 +.nf +cargo new foobar +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 5.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 5." 4.2 +.\} +Create a package in the current directory: +.sp +.if n .RS 4 +.nf +mkdir foo && cd foo +cargo init . +.fi +.if n .RE +.RE +.sp +.RS 4 +.ie n \{\ +\h'-04' 6.\h'+01'\c +.\} +.el \{\ +. sp -1 +. IP " 6." 4.2 +.\} +Learn about a command\(cqs options and usage: +.sp +.if n .RS 4 +.nf +cargo help clean +.fi +.if n .RE +.RE +.SH "BUGS" +.sp +See \c +.URL "https://github.com/rust\-lang/cargo/issues" "" " " +for issues. 
+.SH "SEE ALSO" +.sp +\fBrustc\fP(1), \fBrustdoc\fP(1) diff --git a/src/etc/print-new-snapshot.py b/src/etc/print-new-snapshot.py deleted file mode 100644 index ecfbda0337c..00000000000 --- a/src/etc/print-new-snapshot.py +++ /dev/null @@ -1,31 +0,0 @@ -# When updating snapshots, run this file and pipe it into `src/snapshots.txt` -import os -import sys -import hashlib -import download - -date = sys.argv[1] - -print(date) - -if not os.path.isdir('target/dl'): - os.makedirs('target/dl') - -snaps = { - 'macos-i386': 'i686-apple-darwin', - 'macos-x86_64': 'x86_64-apple-darwin', - 'linux-i386': 'i686-unknown-linux-gnu', - 'linux-x86_64': 'x86_64-unknown-linux-gnu', - 'winnt-i386': 'i686-pc-windows-gnu', - 'winnt-x86_64': 'x86_64-pc-windows-gnu', - 'bitrig-x86_64': 'x86_64-unknown-bitrig', -} - -for platform in sorted(snaps): - triple = snaps[platform] - tarball = 'cargo-nightly-' + triple + '.tar.gz' - url = 'https://static.rust-lang.org/cargo-dist/' + date + '/' + tarball - dl_path = "target/dl/" + tarball - download.get(url, dl_path, quiet=True) - h = hashlib.sha1(open(dl_path, 'rb').read()).hexdigest() - print(' ' + platform + ' ' + h) diff --git a/src/rust-installer b/src/rust-installer deleted file mode 160000 index c37d3747da7..00000000000 --- a/src/rust-installer +++ /dev/null @@ -1 +0,0 @@ -Subproject commit c37d3747da75c280237dc2d6b925078e69555499 diff --git a/src/rustversion.txt b/src/rustversion.txt deleted file mode 100644 index 49b1509a7c4..00000000000 --- a/src/rustversion.txt +++ /dev/null @@ -1 +0,0 @@ -2015-08-13 diff --git a/src/snapshots.txt b/src/snapshots.txt deleted file mode 100644 index ac6c154449f..00000000000 --- a/src/snapshots.txt +++ /dev/null @@ -1,202 +0,0 @@ -2015-04-02 - freebsd-x86_64 2e0ade0901864ea67200f990cb289343b08959e7 - bitrig-x86_64 1b39aba2b9e1a7c9b5ac890b864eb1cb8a18e4d0 - linux-i386 ba6c162680d5509d89ba2363d7cae2047f40c034 - linux-x86_64 94f715c9a52809a639f2ce6f8b1d5215a0c272b5 - macos-i386 cf333f16f89bfd50e8ce461c6f81ca30d33f7f73 - macos-x86_64 1f7008a6ec860e2bc7580e71bdf320ac518ddeb8 - winnt-i386 8c0088ae9e47133b976f7ad155c50ca9abb2906c - winnt-x86_64 01ae9ea568211a20f048e7b00d902d6fe72d1627 - -2015-03-26 - linux-i386 d8b59fb0a0e8222b1753370f1d7c91dcb9697b37 - linux-x86_64 e2f8388d6bccad3b3f09bbbe4ea1bc9671224f4c - macos-i386 3baad9c920c4a68bfd8c10ba3afb80013559adf5 - macos-x86_64 394afa61b945717bca18412c3c93a428db7d6d5d - winnt-i386 4bc98dabc039c34c040942f0eadd99ddb37f06dc - winnt-x86_64 54d948ed95b86b9c63861246cf7cfd7161a48a20 - -2015-03-17 - linux-i386 96a64fa9b4b6cc0cddaa90ecde4e08254c9025d5 - linux-x86_64 354bb5b11b1f19e270ebc0553db1ddc560999bdb - macos-i386 d1b69ef765bc450a3758b8abdb0909df7893058b - macos-x86_64 a2328a82e073c230cd88dcfac96bdc784a999200 - winnt-i386 fb6e346d59bda47ed87e36800e8bfe210cf01297 - winnt-x86_64 4ef3d1ce315df8b27bd842fb66b8e2b03ce99a08 - -2015-02-26 - linux-i386 2a28b604d09b4a76a54a05d91f7f158692427b3a - linux-x86_64 7367f4aca86d38e209ef7236b00175df036c03e2 - macos-i386 e5cabb0a4a2b4e47f7b1ae9b802e2b5d0b14eac5 - macos-x86_64 3026c60ddd46d2bcf1cb178fc801095dbfba5286 - winnt-i386 2008eed3965ed9a989a38c22b9c55c02ae9db1f1 - winnt-x86_64 98a48d7a6dbffcd099ea2574a68f04883624d9a1 - -2015-01-24 - linux-i386 96213038f850569f1c4fa6a0d146c6155c0d566b - linux-x86_64 4d87486493c2881edced7b1d2f8beaac32aaa5b5 - macos-i386 17b9fc782e86bffe170abb83a01e0cb7c90a0daa - macos-x86_64 18887bdbd3e6d2a127aa34216fa06e9877b0fbc6 - winnt-i386 10b9b5fa3e9241ef0b6c3b77b0c072a45b585905 - winnt-x86_64 ba71627e46964535b64da56bd0679e5f86fae957 - 
-2014-12-30 - linux-i386 ab8bba0918d3d2ddbd7fd21f147e223dbf04cece - linux-x86_64 0efe0f7bcbcbeb5494affcc8a2207db448a08c45 - macos-i386 e5097005b0a27c186b8edee24982fd4c3ebba81e - macos-x86_64 6c0bb776e5645fb93b67341b111c715f39b25511 - winnt-i386 2088c5256445b5bb2da57a71f6a9671e5a280477 - winnt-x86_64 950e25bcedc5ba9d96891523c9967f81d5f6c74d - -2014-12-21 - linux-i386 4dea04e278192c5409f43794a98f20a8f59df2d9 - linux-x86_64 3e48c573d3c4d26591feb7bfe988174720f08374 - macos-i386 dc3d498c0567af4a0820e91756dcfff8fde0efac - macos-x86_64 f301bd8c3c93a5c88698c69190e464af1525ac96 - winnt-i386 5b6bc87e302d1ff6ac9b0576292eb7cbff2c3b83 - winnt-x86_64 a8bb8d3a7ed3fc8caf4a33d6b9d2e43544877409 - -2014-12-20 - linux-i386 1cccab5a6ac8e73472bf78cdce019cd1a60d4638 - linux-x86_64 53c176fcda0a40fb77b901303c443de3dce3e58d - macos-i386 bbc23c78ca4307efa6250552a097e6b2ccfe2cc3 - macos-x86_64 4f97a30408c99858ad2b7a7f6edfe3d5b8f0ff3f - winnt-i386 5d77cd604b011100398023e8dc3d98c173247874 - winnt-x86_64 1290dcc2a51e99027803d641c08299abe1265158 - -2014-12-18 - linux-i386 30eec547395093ab9c6a0587a3210666b9272815 - linux-x86_64 20d13252996838680f4356a7addd75403bb11aec - macos-i386 c179a345cb1fbb08f8173133701635ef3c0e03be - macos-x86_64 4f2a877828a2d8ca7d66906529bde01b26d8cef7 - winnt-i386 fa20b54e06badc5fb092981d442e4e831dd9c5f8 - winnt-x86_64 196cae1120f5070e7dd4796d19ed45b9dd01aba2 - -2014-12-08 - linux-i386 853d29bc167748f8a481d5d43fb20ab99e3e16ee - linux-x86_64 57c79c64459145321baa8fc45d51c588d18125ad - macos-i386 43b483c9a389243ce58ba5356c4f71a626dc5658 - macos-x86_64 3777768da6a820f49d789c3477b493b24de59a61 - winnt-i386 b831d1d673db189496f94d3596351d9545687947 - winnt-x86_64 846b677e6fec99690b00595b934fdb30b834c815 - -2014-11-22 - linux-i386 3204c8a38721199f69d2971db887d1dc71a63825 - linux-x86_64 39ca0d02eac184bc764ff9c1f645ca361715c5c2 - macos-i386 ebc1836424c4b3ba49f9adef271c50d2a8e134c0 - macos-x86_64 a2045e95984b65eab4a704152566f8ab9a3be518 - winnt-i386 5e0831b14d2e6ee91ef195dfbc4d9699499d5e99 - winnt-x86_64 d5fa1b58207346061898459955fa7f0b33d77474 - -2014-11-11 - linux-i386 5cbf3346309d303cb954c363097fc4abedf50610 - linux-x86_64 8c1594e227eca6f23ba02daa5f3cd6150ac88907 - macos-i386 f338835a58cc5357ed092a23ba0ddbf2624dfacd - macos-x86_64 b2d03a6a9422c42b7f5ba008c8851ddc89ae693c - winnt-i386 50b851d94181375f0c7a00aacb7d8d63960eddc7 - winnt-x86_64 aa12a1cb80a665f53066a15774360d686b3e5968 - -2014-11-07 - linux-i386 f65ae2b9d94477fec79e444ea489ff98a456e033 - linux-x86_64 1a7f663d8f4e2109240a20d8e63c958e0557d883 - macos-i386 9d82a00bd396c99cc27693864da2364d0394e843 - macos-x86_64 1dc297d8e149384a76dfb7efc7869b82fe663b92 - winnt-i386 d9f87d83c6cbabd7a4794498e4c3a4e94df0740a - winnt-x86_64 74284401082e1b5aff14a72e2152ed5cb55812cf - -2014-10-28 - linux-i386 15fb3dd24140911ba707d8b4b1dd6826732a3ad6 - linux-x86_64 a924d82f5dc987efda6303d3e2c1aeb8ade34faa - macos-i386 bfaddd7eacd1dec4749ab4918fad47f50fa64467 - macos-x86_64 43a91c484f665be2ec0959e2e884ab93cce06a6b - winnt-i386 299de1d99341fed17dc2726e5564dd0ab0ca1dfa - winnt-x86_64 1948ae424458c498f904ea97efb00350a7d8598f - -2014-10-16 - linux-i386 61417861716cd41d8f372be36bb0572e4f29dec8 - linux-x86_64 59be4ff9f547f1ba47ad133ab74151a48bc2659b - macos-i386 cb5267d2e7df8406c26bb0337b1c2e80b125e2cb - macos-x86_64 9283adb4dfd1b60c7bfe38ef755f9187fe7d5580 - winnt-i386 88deb2950fa2b73358bc15763e6373ade6325f53 - winnt-x86_64 0143d4b0e4b20e84dbb27a4440b4b55d369f4456 - -2014-09-19 - linux-i386 c92895421e6fa170dbd713e74334b8c3cf22b817 - linux-x86_64 
66ee4126f9e4820cd82e78181931f8ea365904de - macos-i386 e2364b1f1ece338b9fc4c308c472fc2413bff04e - macos-x86_64 09f92f06ab4f048acf71d83dc0426ff1509779a9 - winnt-i386 0c9b75d5b9ca58a7e39290fbe9c54d91db65c42c - winnt-x86_64 180c547aa79ba3069852450a6e833b577c7d4c3d - -2014-09-11 - linux-i386 f18823de75413ab72df91deb9b3b341c02005b2e - linux-x86_64 58d9789472dd955be94903cafd406ce394915297 - macos-i386 07da45add611e7ecea8f9115ee551df1ff354f51 - macos-x86_64 0b82c9c58865fe8298273ee5fafc937db1b80528 - winnt-i386 4782a7014dd53213535f19b1f2a09f640cf00490 - -2014-09-03 - linux-i386 d357756680a60cd00464fa991b71170dcddb2b30 - linux-x86_64 35fd121fda3509cc020d42223017be03a1c19b87 - macos-i386 40aad83e9d97f5a344179f4573807f3ac04775f9 - macos-x86_64 5e64f637019f499585ab100e5072b8eeeba191ed - winnt-i386 fc25a2f6f9ce3a6f11348ffe17e1115ca81fc4db - -2014-08-19 - linux-i386 8d20fc36b8b7339fcd1ae6c118f1becd001c2b08 - linux-x86_64 46e05521f0dceeb831462caa8a54ca1caf21c078 - macos-i386 fd65cf0e9c6fa137db666da289aa4359dbc56ca1 - macos-x86_64 59ba26a9c92af40c08eed443dcfca518718a2ba1 - winnt-i386 cb0c4fa54abebb86d1a4bb28c2b1d084234c3b35 - -2014-08-16 - linux-i386 30ea09ef95aa230ff415319be699c950603a8fb4 - linux-x86_64 95badae811c711ae5d03f837a38f6ae12c8e473a - macos-i386 5b7afe93a4a79416bab0778e7e03a786cf2e9252 - macos-x86_64 e4141beae6e3dae44393d148492ec9ac1ac1ae5c - winnt-i386 580cb0e92ddb1e2f935386183543c3d0152f13b9 - -2014-08-12 - linux-i386 af5e80dba2d845e30039302e57bd516c96b347de - linux-x86_64 42a7786073802d6b47dbb6d2bb071a322964b28e - macos-i386 244595a91534ce3097877d96241ae21d150e670d - macos-x86_64 8c56578bd4610adcc1b608aa841c13f6f9b60d45 - winnt-i386 4708fba1f267c1c32460c7d8b4cd2ed8c32a1ecb - -2014-08-08 - linux-i386 44207002e96c4f1309af70673966ee1e67938f5e - linux-x86_64 5dc5e5aa575814af2d4e40e9dcdca2c55b594bd1 - macos-i386 5d1924057a0d56d033f32680f4b393cdd9c6805a - macos-x86_64 65462ea1e48cb4b4c57ff7e947cd2cc26a8f2723 - winnt-i386 a481b15d35ab2e1d1dcd2f181a2566e097604ffc - -2014-08-06 - linux-i386 eb7c2a87b30db077f6f1c4ea724ebd0e5cc07d1c - linux-x86_64 1672657adb9012df2912bbb2f43466f1c6817e55 - macos-i386 1224207bbfa9f46796940512ac8a7a9ab9f5665b - macos-x86_64 da4afea32d7336a0a91b8fe160d38896385d4ae2 - winnt-i386 2b6b2efe9ec77d3d456c943bb2e54f2281309ef1 - -2014-08-04 - linux-i386 49032ce8c5c2b94d73e298dcbdb09e0b2fbe573c - linux-x86_64 98c83ecc7cac3765d62f5e8b19bdc506e01f3cab - macos-i386 c450260a2edace970089b35fed644eb607b509ba - macos-x86_64 04763ba59b70240d16bdb57845e3511b3b243522 - winnt-i386 15a70b068beb3b85760279496cf62b7406e5e2b2 - -2014-07-30 - linux-i386 4d4e78426060b891cf729d5e3cca86d5aebdd31d - linux-x86_64 2a39bb838bc1c740d41a2ee8054a2c32f1efbec8 - macos-i386 16d1581dad71b1cf551646bc2dfdc920f4dda16c - macos-x86_64 05d836f2195e55f050e68e8bb209405a67fbefcb - winnt-i386 ade95f921ba73848d2ae67d1b8cd7c364e881e86 - -2014-07-29 - mac 53f8bc39132e987d25e022698c3234fee0916ecf - linux b7dbdc89126577fda2eef7d63c5f7fc1d8d28f99 - win 9551454e2ce649d146ad8d856cee3672ab0def02 - -2014-07-26 - mac 9a78815c7fcdb1cdabc93eb120f80444f209d968 - linux b38e7c45292d2cc6a1932fa9a1f349f9b92c0c1d - win 4e955f8b80684ea6c9ca2dd6e2c235ce2d9cf21f diff --git a/tests/resolve.rs b/tests/resolve.rs deleted file mode 100644 index 0e551ac9647..00000000000 --- a/tests/resolve.rs +++ /dev/null @@ -1,374 +0,0 @@ -extern crate hamcrest; -extern crate cargo; - -use std::collections::HashMap; - -use hamcrest::{assert_that, equal_to, contains}; - -use cargo::core::source::{SourceId, GitReference}; -use cargo::core::dependency::Kind::{self, 
Development}; -use cargo::core::{Dependency, PackageId, Summary, Registry}; -use cargo::util::{CargoResult, ToUrl}; -use cargo::core::resolver::{self, Method}; - -fn resolve<R: Registry>(pkg: PackageId, deps: Vec<Dependency>, - registry: &mut R) - -> CargoResult<Vec<PackageId>> { - let summary = Summary::new(pkg, deps, HashMap::new()).unwrap(); - let method = Method::Everything; - Ok(try!(resolver::resolve(&summary, &method, registry)).iter().map(|p| { - p.clone() - }).collect()) -} - -trait ToDep { - fn to_dep(self) -> Dependency; -} - -impl ToDep for &'static str { - fn to_dep(self) -> Dependency { - let url = "http://example.com".to_url().unwrap(); - let source_id = SourceId::for_registry(&url); - Dependency::parse(self, Some("1.0.0"), &source_id).unwrap() - } -} - -impl ToDep for Dependency { - fn to_dep(self) -> Dependency { - self - } -} - -trait ToPkgId { - fn to_pkgid(&self) -> PackageId; -} - -impl ToPkgId for &'static str { - fn to_pkgid(&self) -> PackageId { - PackageId::new(*self, "1.0.0", &registry_loc()).unwrap() - } -} - -impl ToPkgId for (&'static str, &'static str) { - fn to_pkgid(&self) -> PackageId { - let (name, vers) = *self; - PackageId::new(name, vers, &registry_loc()).unwrap() - } -} - -macro_rules! pkg { - ($pkgid:expr => [$($deps:expr),+]) => ({ - let d: Vec<Dependency> = vec![$($deps.to_dep()),+]; - - Summary::new($pkgid.to_pkgid(), d, HashMap::new()).unwrap() - }); - - ($pkgid:expr) => ( - Summary::new($pkgid.to_pkgid(), Vec::new(), HashMap::new()).unwrap() - ) -} - -fn registry_loc() -> SourceId { - let remote = "http://example.com".to_url().unwrap(); - SourceId::for_registry(&remote) -} - -fn pkg(name: &str) -> Summary { - Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap() -} - -fn pkg_id(name: &str) -> PackageId { - PackageId::new(name, "1.0.0", &registry_loc()).unwrap() -} - -fn pkg_id_loc(name: &str, loc: &str) -> PackageId { - let remote = loc.to_url(); - let master = GitReference::Branch("master".to_string()); - let source_id = SourceId::for_git(&remote.unwrap(), master); - - PackageId::new(name, "1.0.0", &source_id).unwrap() -} - -fn pkg_loc(name: &str, loc: &str) -> Summary { - Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap() -} - -fn dep(name: &str) -> Dependency { dep_req(name, "1.0.0") } -fn dep_req(name: &str, req: &str) -> Dependency { - let url = "http://example.com".to_url().unwrap(); - let source_id = SourceId::for_registry(&url); - Dependency::parse(name, Some(req), &source_id).unwrap() -} - -fn dep_loc(name: &str, location: &str) -> Dependency { - let url = location.to_url().unwrap(); - let master = GitReference::Branch("master".to_string()); - let source_id = SourceId::for_git(&url, master); - Dependency::parse(name, Some("1.0.0"), &source_id).unwrap() -} -fn dep_kind(name: &str, kind: Kind) -> Dependency { - dep(name).clone_inner().set_kind(kind).into_dependency() -} - -fn registry(pkgs: Vec<Summary>) -> Vec<Summary> { - pkgs -} - -fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> { - names.iter().map(|name| name.to_pkgid()).collect() -} - -fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> { - names.iter() - .map(|&(name, loc)| pkg_id_loc(name, loc)).collect() -} - -#[test] -fn test_resolving_empty_dependency_list() { - let res = resolve(pkg_id("root"), Vec::new(), - &mut registry(vec!())).unwrap(); - - assert_that(&res, equal_to(&names(&["root"]))); -} - -#[test] -fn test_resolving_only_package() { - let mut reg = registry(vec!(pkg("foo"))); - let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); - - assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly()); -} -
-#[test] -fn test_resolving_one_dep() { - let mut reg = registry(vec!(pkg("foo"), pkg("bar"))); - let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg); - - assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly()); -} - -#[test] -fn test_resolving_multiple_deps() { - let mut reg = registry(vec!(pkg!("foo"), pkg!("bar"), pkg!("baz"))); - let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")], - &mut reg).unwrap(); - - assert_that(&res, contains(names(&["root", "foo", "baz"])).exactly()); -} - -#[test] -fn test_resolving_transitive_deps() { - let mut reg = registry(vec!(pkg!("foo"), pkg!("bar" => ["foo"]))); - let res = resolve(pkg_id("root"), vec![dep("bar")], &mut reg).unwrap(); - - assert_that(&res, contains(names(&["root", "foo", "bar"]))); -} - -#[test] -fn test_resolving_common_transitive_deps() { - let mut reg = registry(vec!(pkg!("foo" => ["bar"]), pkg!("bar"))); - let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")], - &mut reg).unwrap(); - - assert_that(&res, contains(names(&["root", "foo", "bar"]))); -} - -#[test] -fn test_resolving_with_same_name() { - let list = vec![pkg_loc("foo", "http://first.example.com"), - pkg_loc("bar", "http://second.example.com")]; - - let mut reg = registry(list); - let res = resolve(pkg_id("root"), - vec![dep_loc("foo", "http://first.example.com"), - dep_loc("bar", "http://second.example.com")], - &mut reg); - - let mut names = loc_names(&[("foo", "http://first.example.com"), - ("bar", "http://second.example.com")]); - - names.push(pkg_id("root")); - - assert_that(&res.unwrap(), contains(names).exactly()); -} - -#[test] -fn test_resolving_with_dev_deps() { - let mut reg = registry(vec!( - pkg!("foo" => ["bar", dep_kind("baz", Development)]), - pkg!("baz" => ["bat", dep_kind("bam", Development)]), - pkg!("bar"), - pkg!("bat") - )); - - let res = resolve(pkg_id("root"), - vec![dep("foo"), dep_kind("baz", Development)], - &mut reg).unwrap(); - - assert_that(&res, contains(names(&["root", "foo", "bar", "baz"]))); -} - -#[test] -fn resolving_with_many_versions() { - let mut reg = registry(vec!( - pkg!(("foo", "1.0.1")), - pkg!(("foo", "1.0.2")), - )); - - let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg).unwrap(); - - assert_that(&res, contains(names(&[("root", "1.0.0"), - ("foo", "1.0.2")]))); -} - -#[test] -fn resolving_with_specific_version() { - let mut reg = registry(vec!( - pkg!(("foo", "1.0.1")), - pkg!(("foo", "1.0.2")), - )); - - let res = resolve(pkg_id("root"), vec![dep_req("foo", "=1.0.1")], - &mut reg).unwrap(); - - assert_that(&res, contains(names(&[("root", "1.0.0"), - ("foo", "1.0.1")]))); -} - -#[test] -fn resolving_incompat_versions() { - let mut reg = registry(vec!( - pkg!(("foo", "1.0.1")), - pkg!(("foo", "1.0.2")), - pkg!("bar" => [dep_req("foo", "=1.0.2")]), - )); - - assert!(resolve(pkg_id("root"), vec![ - dep_req("foo", "=1.0.1"), - dep("bar"), - ], &mut reg).is_err()); -} - -#[test] -fn resolving_backtrack() { - let mut reg = registry(vec!( - pkg!(("foo", "1.0.2") => [dep("bar")]), - pkg!(("foo", "1.0.1") => [dep("baz")]), - pkg!("bar" => [dep_req("foo", "=2.0.2")]), - pkg!("baz"), - )); - - let res = resolve(pkg_id("root"), vec![ - dep_req("foo", "^1"), - ], &mut reg).unwrap(); - - assert_that(&res, contains(names(&[("root", "1.0.0"), - ("foo", "1.0.1"), - ("baz", "1.0.0")]))); -} - -#[test] -fn resolving_allows_multiple_compatible_versions() { - let mut reg = registry(vec!( - pkg!(("foo", "1.0.0")), - pkg!(("foo", "2.0.0")), - pkg!(("foo", "0.1.0")), - pkg!(("foo", "0.2.0")), - 
- pkg!("bar" => ["d1", "d2", "d3", "d4"]), - pkg!("d1" => [dep_req("foo", "1")]), - pkg!("d2" => [dep_req("foo", "2")]), - pkg!("d3" => [dep_req("foo", "0.1")]), - pkg!("d4" => [dep_req("foo", "0.2")]), - )); - - let res = resolve(pkg_id("root"), vec![ - dep("bar"), - ], &mut reg).unwrap(); - - assert_that(&res, contains(names(&[("root", "1.0.0"), - ("foo", "1.0.0"), - ("foo", "2.0.0"), - ("foo", "0.1.0"), - ("foo", "0.2.0"), - ("d1", "1.0.0"), - ("d2", "1.0.0"), - ("d3", "1.0.0"), - ("d4", "1.0.0"), - ("bar", "1.0.0")]))); -} - -#[test] -fn resolving_with_deep_backtracking() { - let mut reg = registry(vec!( - pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), - pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), - - pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), - dep_req("other", "1")]), - pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), - - pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), - pkg!(("baz", "1.0.1")), - - pkg!(("dep_req", "1.0.0")), - pkg!(("dep_req", "2.0.0")), - )); - - let res = resolve(pkg_id("root"), vec![ - dep_req("foo", "1"), - ], &mut reg).unwrap(); - - assert_that(&res, contains(names(&[("root", "1.0.0"), - ("foo", "1.0.0"), - ("bar", "2.0.0"), - ("baz", "1.0.1")]))); -} - -#[test] -fn resolving_but_no_exists() { - let mut reg = registry(vec!( - )); - - let res = resolve(pkg_id("root"), vec![ - dep_req("foo", "1"), - ], &mut reg); - assert!(res.is_err()); - - assert_eq!(res.err().unwrap().to_string(), "\ -no matching package named `foo` found (required by `root`) -location searched: registry http://example.com/ -version required: ^1\ -"); -} - -#[test] -fn resolving_cycle() { - let mut reg = registry(vec!( - pkg!("foo" => ["foo"]), - )); - - let _ = resolve(pkg_id("root"), vec![ - dep_req("foo", "1"), - ], &mut reg); -} - -#[test] -fn hard_equality() { - extern crate env_logger; - let mut reg = registry(vec!( - pkg!(("foo", "1.0.1")), - pkg!(("foo", "1.0.0")), - - pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]), - )); - - let res = resolve(pkg_id("root"), vec![ - dep_req("bar", "1"), - dep_req("foo", "=1.0.0"), - ], &mut reg).unwrap(); - - assert_that(&res, contains(names(&[("root", "1.0.0"), - ("foo", "1.0.0"), - ("bar", "1.0.0")]))); -} diff --git a/tests/support/git.rs b/tests/support/git.rs deleted file mode 100644 index 082f07fab6a..00000000000 --- a/tests/support/git.rs +++ /dev/null @@ -1,121 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use std::path::{Path, PathBuf}; - -use url::Url; -use git2; - -use cargo::util::ProcessError; -use support::{ProjectBuilder, project, path2url}; - -pub struct RepoBuilder { - repo: git2::Repository, - files: Vec, -} - -pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) } - -impl RepoBuilder { - pub fn init(p: &Path) -> RepoBuilder { - fs::create_dir_all(p.parent().unwrap()).unwrap(); - let repo = git2::Repository::init(p).unwrap(); - { - let mut config = repo.config().unwrap(); - config.set_str("user.name", "name").unwrap(); - config.set_str("user.email", "email").unwrap(); - } - RepoBuilder { repo: repo, files: Vec::new() } - } - - pub fn file(self, path: &str, contents: &str) -> RepoBuilder { - let mut me = self.nocommit_file(path, contents); - me.files.push(PathBuf::from(path)); - me - } - - pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder { - let dst = self.repo.workdir().unwrap().join(path); - fs::create_dir_all(dst.parent().unwrap()).unwrap(); - File::create(&dst).unwrap().write_all(contents.as_bytes()).unwrap(); - self - } - - pub fn build(&self) { - let mut 
index = self.repo.index().unwrap(); - for file in self.files.iter() { - index.add_path(file).unwrap(); - } - index.write().unwrap(); - let id = index.write_tree().unwrap(); - let tree = self.repo.find_tree(id).unwrap(); - let sig = self.repo.signature().unwrap(); - self.repo.commit(Some("HEAD"), &sig, &sig, - "Initial commit", &tree, &[]).unwrap(); - } - - pub fn url(&self) -> Url { - path2url(self.repo.workdir().unwrap().to_path_buf()) - } -} - -pub fn new<F>(name: &str, callback: F) -> Result<ProjectBuilder, ProcessError> - where F: FnOnce(ProjectBuilder) -> ProjectBuilder -{ - let mut git_project = project(name); - git_project = callback(git_project); - git_project.build(); - - let repo = git2::Repository::init(&git_project.root()).unwrap(); - let mut cfg = repo.config().unwrap(); - cfg.set_str("user.email", "foo@bar.com").unwrap(); - cfg.set_str("user.name", "Foo Bar").unwrap(); - drop(cfg); - add(&repo); - commit(&repo); - Ok(git_project) -} - -pub fn add(repo: &git2::Repository) { - // FIXME(libgit2/libgit2#2514): apparently add_all will add all submodules - // as well, and then fail b/c they're a directory. As a stopgap, we just - // ignore all submodules. - let mut s = repo.submodules().unwrap(); - for submodule in s.iter_mut() { - submodule.add_to_index(false).unwrap(); - } - let mut index = repo.index().unwrap(); - index.add_all(["*"].iter(), git2::ADD_DEFAULT, - Some(&mut (|a, _b| { - if s.iter().any(|s| a.starts_with(s.path())) {1} else {0} - }))).unwrap(); - index.write().unwrap(); -} - -pub fn add_submodule<'a>(repo: &'a git2::Repository, url: &str, - path: &Path) -> git2::Submodule<'a> -{ - let path = path.to_str().unwrap().replace(r"\", "/"); - let mut s = repo.submodule(url, Path::new(&path), false).unwrap(); - let subrepo = s.open().unwrap(); - let mut origin = subrepo.find_remote("origin").unwrap(); - origin.add_fetch("refs/heads/*:refs/heads/*").unwrap(); - origin.fetch(&[], None).unwrap(); - origin.save().unwrap(); - subrepo.checkout_head(None).unwrap(); - s.add_finalize().unwrap(); - return s; -} - -pub fn commit(repo: &git2::Repository) -> git2::Oid { - let tree_id = repo.index().unwrap().write_tree().unwrap(); - let sig = repo.signature().unwrap(); - let mut parents = Vec::new(); - match repo.head().ok().map(|h| h.target().unwrap()) { - Some(parent) => parents.push(repo.find_commit(parent).unwrap()), - None => {} - } - let parents = parents.iter().collect::<Vec<_>>(); - repo.commit(Some("HEAD"), &sig, &sig, "test", - &repo.find_tree(tree_id).unwrap(), - &parents).unwrap() -} diff --git a/tests/support/mod.rs b/tests/support/mod.rs deleted file mode 100644 index 0f7602e99da..00000000000 --- a/tests/support/mod.rs +++ /dev/null @@ -1,532 +0,0 @@ -use std::env; -use std::error::Error; -use std::ffi::OsStr; -use std::fmt; -use std::fs; -use std::io::prelude::*; -use std::os; -use std::path::{Path, PathBuf}; -use std::process::Output; -use std::str; - -use url::Url; -use hamcrest as ham; -use cargo::util::{process,ProcessBuilder}; -use cargo::util::ProcessError; - -use support::paths::CargoPathExt; - -pub mod paths; -pub mod git; -pub mod registry; - -/* - * - * ===== Builders ===== - * - */ - -#[derive(PartialEq,Clone)] -struct FileBuilder { - path: PathBuf, - body: String -} - -impl FileBuilder { - pub fn new(path: PathBuf, body: &str) -> FileBuilder { - FileBuilder { path: path, body: body.to_string() } - } - - fn mk(&self) -> Result<(), String> { - try!(mkdir_recursive(&self.dirname())); - - let mut file = try!( - fs::File::create(&self.path) - .with_err_msg(format!("Could not create file; path={}", - 
self.path.display()))); - - file.write_all(self.body.as_bytes()) - .with_err_msg(format!("Could not write to file; path={}", - self.path.display())) - } - - fn dirname(&self) -> &Path { - self.path.parent().unwrap() - } -} - -#[derive(PartialEq,Clone)] -struct SymlinkBuilder { - dst: PathBuf, - src: PathBuf, -} - -impl SymlinkBuilder { - pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { - SymlinkBuilder { dst: dst, src: src } - } - - #[cfg(unix)] - fn mk(&self) -> Result<(), String> { - try!(mkdir_recursive(&self.dirname())); - - os::unix::fs::symlink(&self.dst, &self.src) - .with_err_msg(format!("Could not create symlink; dst={} src={}", - self.dst.display(), self.src.display())) - } - - #[cfg(windows)] - fn mk(&self) -> Result<(), String> { - try!(mkdir_recursive(&self.dirname())); - - os::windows::fs::symlink_file(&self.dst, &self.src) - .with_err_msg(format!("Could not create symlink; dst={} src={}", - self.dst.display(), self.src.display())) - } - - fn dirname(&self) -> &Path { - self.src.parent().unwrap() - } -} - -#[derive(PartialEq,Clone)] -pub struct ProjectBuilder { - name: String, - root: PathBuf, - files: Vec<FileBuilder>, - symlinks: Vec<SymlinkBuilder> -} - -impl ProjectBuilder { - pub fn new(name: &str, root: PathBuf) -> ProjectBuilder { - ProjectBuilder { - name: name.to_string(), - root: root, - files: vec!(), - symlinks: vec!() - } - } - - pub fn root(&self) -> PathBuf { - self.root.clone() - } - - pub fn url(&self) -> Url { path2url(self.root()) } - - pub fn bin(&self, b: &str) -> PathBuf { - self.build_dir().join("debug").join(&format!("{}{}", b, - env::consts::EXE_SUFFIX)) - } - - pub fn release_bin(&self, b: &str) -> PathBuf { - self.build_dir().join("release").join(&format!("{}{}", b, - env::consts::EXE_SUFFIX)) - } - - pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { - self.build_dir().join(target).join("debug") - .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) - } - - pub fn build_dir(&self) -> PathBuf { - self.root.join("target") - } - - pub fn process<T: AsRef<OsStr>>(&self, program: T) -> ProcessBuilder { - let mut p = process(program).unwrap(); - p.cwd(&self.root()) - .env("HOME", &paths::home()) - .env_remove("CARGO_HOME") // make sure we don't pick up an outer one - .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows - return p; - } - - pub fn cargo(&self, cmd: &str) -> ProcessBuilder { - let mut p = self.process(&cargo_dir().join("cargo")); - p.arg(cmd); - return p; - } - - pub fn cargo_process(&self, cmd: &str) -> ProcessBuilder { - self.build(); - self.cargo(cmd) - } - - pub fn file<B: AsRef<Path>>(mut self, path: B, - body: &str) -> ProjectBuilder { - self.files.push(FileBuilder::new(self.root.join(path), body)); - self - } - - pub fn symlink<T: AsRef<Path>>(mut self, dst: T, - src: T) -> ProjectBuilder { - self.symlinks.push(SymlinkBuilder::new(self.root.join(dst), - self.root.join(src))); - self - } - - // TODO: return something different than a ProjectBuilder - pub fn build(&self) -> &ProjectBuilder { - match self.build_with_result() { - Err(e) => panic!(e), - _ => return self - } - } - - pub fn build_with_result(&self) -> Result<(), String> { - // First, clean the directory if it already exists - try!(self.rm_root()); - - // Create the empty directory - try!(mkdir_recursive(&self.root)); - - for file in self.files.iter() { - try!(file.mk()); - } - - for symlink in self.symlinks.iter() { - try!(symlink.mk()); - } - - Ok(()) - } - - fn rm_root(&self) -> Result<(), String> { - if self.root.c_exists() { - rmdir_recursive(&self.root) - } else { - Ok(()) - } - } -} - -// Generates a project 
layout -pub fn project(name: &str) -> ProjectBuilder { - ProjectBuilder::new(name, paths::root().join(name)) -} - -// === Helpers === - -pub fn mkdir_recursive(path: &Path) -> Result<(), String> { - fs::create_dir_all(path) - .with_err_msg(format!("could not create directory; path={}", - path.display())) -} - -pub fn rmdir_recursive(path: &Path) -> Result<(), String> { - path.rm_rf() - .with_err_msg(format!("could not rm directory; path={}", - path.display())) -} - -pub fn main_file(println: &str, deps: &[&str]) -> String { - let mut buf = String::new(); - - for dep in deps.iter() { - buf.push_str(&format!("extern crate {};\n", dep)); - } - - buf.push_str("fn main() { println!("); - buf.push_str(&println); - buf.push_str("); }\n"); - - buf.to_string() -} - -trait ErrMsg<T> { - fn with_err_msg(self, val: String) -> Result<T, String>; -} - -impl<T, E: fmt::Display> ErrMsg<T> for Result<T, E> { - fn with_err_msg(self, val: String) -> Result<T, String> { - match self { - Ok(val) => Ok(val), - Err(err) => Err(format!("{}; original={}", val, err)) - } - } -} - -// Path to cargo executables -pub fn cargo_dir() -> PathBuf { - env::var_os("CARGO_BIN_PATH").map(PathBuf::from).or_else(|| { - env::current_exe().ok().as_ref().and_then(|s| s.parent()) - .map(|s| s.to_path_buf()) - }).unwrap_or_else(|| { - panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test") - }) -} - -/// Returns an absolute path in the filesystem that `path` points to. The -/// returned path does not contain any symlinks in its hierarchy. -/* - * - * ===== Matchers ===== - * - */ - -#[derive(Clone)] -pub struct Execs { - expect_stdout: Option<String>, - expect_stdin: Option<String>, - expect_stderr: Option<String>, - expect_exit_code: Option<i32> -} - -impl Execs { - - pub fn with_stdout<S: ToString>(mut self, expected: S) -> Execs { - self.expect_stdout = Some(expected.to_string()); - self - } - - pub fn with_stderr<S: ToString>(mut self, expected: S) -> Execs { - self.expect_stderr = Some(expected.to_string()); - self - } - - pub fn with_status(mut self, expected: i32) -> Execs { - self.expect_exit_code = Some(expected); - self - } - - fn match_output(&self, actual: &Output) -> ham::MatchResult { - self.match_status(actual) - .and(self.match_stdout(actual)) - .and(self.match_stderr(actual)) - } - - fn match_status(&self, actual: &Output) -> ham::MatchResult { - match self.expect_exit_code { - None => ham::success(), - Some(code) => { - ham::expect( - actual.status.code() == Some(code), - format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}", - actual.status, - String::from_utf8_lossy(&actual.stdout), - String::from_utf8_lossy(&actual.stderr))) - } - } - } - - fn match_stdout(&self, actual: &Output) -> ham::MatchResult { - self.match_std(self.expect_stdout.as_ref(), &actual.stdout, - "stdout", &actual.stderr) - } - - fn match_stderr(&self, actual: &Output) -> ham::MatchResult { - self.match_std(self.expect_stderr.as_ref(), &actual.stderr, - "stderr", &actual.stdout) - } - - #[allow(deprecated)] // connect => join in 1.3 - fn match_std(&self, expected: Option<&String>, actual: &[u8], - description: &str, extra: &[u8]) -> ham::MatchResult { - match expected.map(|s| &s[..]) { - None => ham::success(), - Some(out) => { - let actual = match str::from_utf8(actual) { - Err(..) => return Err(format!("{} was not utf8 encoded", - description)), - Ok(actual) => actual, - }; - // Let's not deal with \r\n vs \n on windows... 
- let actual = actual.replace("\r", ""); - let actual = actual.replace("\t", ""); - - let a = actual.lines(); - let e = out.lines(); - - let diffs = zip_all(a, e).enumerate(); - let diffs = diffs.filter_map(|(i, (a,e))| { - match (a, e) { - (Some(a), Some(e)) => { - if lines_match(&e, &a) { - None - } else { - Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a)) - } - }, - (Some(a), None) => { - Some(format!("{:3} -\n + |{}|\n", i, a)) - }, - (None, Some(e)) => { - Some(format!("{:3} - |{}|\n +\n", i, e)) - }, - (None, None) => panic!("Cannot get here") - } - }); - - let diffs = diffs.collect::<Vec<String>>().connect("\n"); - - ham::expect(diffs.len() == 0, - format!("differences:\n\ - {}\n\n\ - other output:\n\ - `{}`", diffs, - String::from_utf8_lossy(extra))) - } - } - } -} - -fn lines_match(expected: &str, mut actual: &str) -> bool { - for part in expected.split("[..]") { - match actual.find(part) { - Some(i) => actual = &actual[i + part.len()..], - None => { - return false - } - } - } - actual.len() == 0 || expected.ends_with("[..]") -} - -struct ZipAll<I1, I2> { - first: I1, - second: I2, -} - -impl<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>> Iterator for ZipAll<I1, I2> { - type Item = (Option<T>, Option<T>); - fn next(&mut self) -> Option<(Option<T>, Option<T>)> { - let first = self.first.next(); - let second = self.second.next(); - - match (first, second) { - (None, None) => None, - (a, b) => Some((a, b)) - } - } -} - -fn zip_all<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>>(a: I1, b: I2) -> ZipAll<I1, I2> { - ZipAll { - first: a, - second: b, - } -} - -impl fmt::Display for Execs { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "execs") - } -} - -impl ham::Matcher<ProcessBuilder> for Execs { - fn matches(&self, mut process: ProcessBuilder) -> ham::MatchResult { - self.matches(&mut process) - } -} - -impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs { - fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult { - let res = process.exec_with_output(); - - match res { - Ok(out) => self.match_output(&out), - Err(ProcessError { output: Some(ref out), .. 
}) => { - self.match_output(out) - } - Err(e) => { - let mut s = format!("could not exec process {}: {}", process, e); - match e.cause() { - Some(cause) => s.push_str(&format!("\ncaused by: {}", - cause.description())), - None => {} - } - Err(s) - } - } - } -} - -pub fn execs() -> Execs { - Execs { - expect_stdout: None, - expect_stderr: None, - expect_stdin: None, - expect_exit_code: None - } -} - -#[derive(Clone)] -pub struct ShellWrites { - expected: String -} - -impl fmt::Display for ShellWrites { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "`{}` written to the shell", self.expected) - } -} - -impl<'a> ham::Matcher<&'a [u8]> for ShellWrites { - fn matches(&self, actual: &[u8]) - -> ham::MatchResult - { - let actual = String::from_utf8_lossy(actual); - let actual = actual.to_string(); - ham::expect(actual == self.expected, actual) - } -} - -pub fn shell_writes<T: fmt::Display>(string: T) -> ShellWrites { - ShellWrites { expected: string.to_string() } -} - -pub trait Tap { - fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> Self; -} - -impl<T> Tap for T { - fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> T { - callback(&mut self); - self - } -} - -pub fn basic_bin_manifest(name: &str) -> String { - format!(r#" - [package] - - name = "{}" - version = "0.5.0" - authors = ["wycats@example.com"] - - [[bin]] - - name = "{}" - "#, name, name) -} - -pub fn basic_lib_manifest(name: &str) -> String { - format!(r#" - [package] - - name = "{}" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "{}" - "#, name, name) -} - -pub fn path2url(p: PathBuf) -> Url { - Url::from_file_path(&*p).ok().unwrap() -} - -pub static RUNNING: &'static str = "     Running"; -pub static COMPILING: &'static str = "   Compiling"; -pub static FRESH: &'static str = "       Fresh"; -pub static UPDATING: &'static str = "    Updating"; -pub static ADDING: &'static str = "      Adding"; -pub static REMOVING: &'static str = "    Removing"; -pub static DOCTEST: &'static str = "   Doc-tests"; -pub static PACKAGING: &'static str = "   Packaging"; -pub static DOWNLOADING: &'static str = " Downloading"; -pub static UPLOADING: &'static str = "   Uploading"; -pub static VERIFYING: &'static str = "   Verifying"; -pub static ARCHIVING: &'static str = "   Archiving"; diff --git a/tests/support/paths.rs b/tests/support/paths.rs deleted file mode 100644 index 57f59d712ef..00000000000 --- a/tests/support/paths.rs +++ /dev/null @@ -1,156 +0,0 @@ -use std::env; -use std::fs; -use std::io::prelude::*; -use std::io::{self, ErrorKind}; -use std::path::{Path, PathBuf}; -use std::sync::{Once, ONCE_INIT}; -use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; - -use filetime::{self, FileTime}; - -static CARGO_INTEGRATION_TEST_DIR : &'static str = "cit"; -static NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT; -thread_local!(static TASK_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst)); - -pub fn root() -> PathBuf { - let mut path = env::current_exe().unwrap(); - path.pop(); // chop off exe name - path.pop(); // chop off 'debug' - - // If `cargo test` is run manually then our path looks like - // `target/debug/foo`, in which case our `path` is already pointing at - // `target`. If, however, `cargo test --target $target` is used then the - // output is `target/$target/debug/foo`, so our path is pointing at - // `target/$target`. Here we conditionally pop the `$target` name. 
- if path.file_name().and_then(|s| s.to_str()) != Some("target") { - path.pop(); - } - - path.join(CARGO_INTEGRATION_TEST_DIR) - .join(&TASK_ID.with(|my_id| format!("t{}", my_id))) -} - -pub fn home() -> PathBuf { - root().join("home") -} - -pub trait CargoPathExt { - fn rm_rf(&self) -> io::Result<()>; - fn mkdir_p(&self) -> io::Result<()>; - fn move_into_the_past(&self) -> io::Result<()>; - - // cargo versions of the standard PathExt trait - fn c_exists(&self) -> bool; - fn c_is_file(&self) -> bool; - fn c_is_dir(&self) -> bool; - fn c_metadata(&self) -> io::Result<fs::Metadata>; -} - -impl CargoPathExt for Path { - /* Technically there is a potential race condition, but we don't - * care all that much for our tests - */ - fn rm_rf(&self) -> io::Result<()> { - if self.c_exists() { - for file in fs::read_dir(self).unwrap() { - let file = try!(file).path(); - - if file.c_is_dir() { - try!(file.rm_rf()); - } else { - // On windows we can't remove a readonly file, and git will - // often clone files as readonly. As a result, we have some - // special logic to remove readonly files on windows. - match fs::remove_file(&file) { - Ok(()) => {} - Err(ref e) if cfg!(windows) && - e.kind() == ErrorKind::PermissionDenied => { - let mut p = file.c_metadata().unwrap().permissions(); - p.set_readonly(false); - fs::set_permissions(&file, p).unwrap(); - try!(fs::remove_file(&file)); - } - Err(e) => return Err(e) - } - } - } - fs::remove_dir(self) - } else { - Ok(()) - } - } - - fn mkdir_p(&self) -> io::Result<()> { - fs::create_dir_all(self) - } - - fn move_into_the_past(&self) -> io::Result<()> { - if self.c_is_file() { - try!(time_travel(self)); - } else { - try!(recurse(self, &self.join("target"))); - } - return Ok(()); - - fn recurse(p: &Path, bad: &Path) -> io::Result<()> { - if p.c_is_file() { - time_travel(p) - } else if p.starts_with(bad) { - Ok(()) - } else { - for f in try!(fs::read_dir(p)) { - let f = try!(f).path(); - try!(recurse(&f, bad)); - } - Ok(()) - } - } - - fn time_travel(path: &Path) -> io::Result<()> { - let stat = try!(path.c_metadata()); - - let mtime = FileTime::from_last_modification_time(&stat); - let newtime = mtime.seconds_relative_to_1970() - 3600; - let nanos = mtime.nanoseconds(); - let newtime = FileTime::from_seconds_since_1970(newtime, nanos); - - // Sadly change_file_times has a failure mode where a readonly file - // cannot have its times changed on windows. 
- match filetime::set_file_times(path, newtime, newtime) { - Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => {} - e => return e, - } - let mut perms = stat.permissions(); - perms.set_readonly(false); - try!(fs::set_permissions(path, perms)); - filetime::set_file_times(path, newtime, newtime) - } - } - - fn c_exists(&self) -> bool { - fs::metadata(self).is_ok() - } - - fn c_is_file(&self) -> bool { - fs::metadata(self).map(|m| m.is_file()).unwrap_or(false) - } - - fn c_is_dir(&self) -> bool { - fs::metadata(self).map(|m| m.is_dir()).unwrap_or(false) - } - - fn c_metadata(&self) -> io::Result<fs::Metadata> { - fs::metadata(self) - } -} - -/// Ensure required test directories exist and are empty -pub fn setup() { - debug!("path setup; root={}; home={}", root().display(), home().display()); - static INIT: Once = ONCE_INIT; - INIT.call_once(|| { - root().parent().unwrap().mkdir_p().unwrap(); - }); - root().rm_rf().unwrap(); - home().mkdir_p().unwrap(); -} diff --git a/tests/support/registry.rs b/tests/support/registry.rs deleted file mode 100644 index 861bc5a11dc..00000000000 --- a/tests/support/registry.rs +++ /dev/null @@ -1,144 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use std::path::{PathBuf, Path}; - -use flate2::Compression::Default; -use flate2::write::GzEncoder; -use git2; -use rustc_serialize::hex::ToHex; -use tar::Archive; -use url::Url; - -use support::project; -use support::paths; -use support::git::repo; -use cargo::util::Sha256; - -pub fn registry_path() -> PathBuf { paths::root().join("registry") } -pub fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } -pub fn dl_path() -> PathBuf { paths::root().join("dl") } -pub fn dl_url() -> Url { Url::from_file_path(&*dl_path()).ok().unwrap() } - -pub fn init() { - let config = paths::home().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - File::create(&config).unwrap().write_all(format!(r#" - [registry] - index = "{reg}" - token = "api-token" - "#, reg = registry()).as_bytes()).unwrap(); - - // Init a new registry - repo(&registry_path()) - .file("config.json", &format!(r#" - {{"dl":"{}","api":""}} - "#, dl_url())) - .build(); -} - -pub fn mock_archive(name: &str, version: &str, deps: &[(&str, &str, &str)]) { - let mut manifest = format!(r#" - [package] - name = "{}" - version = "{}" - authors = [] - "#, name, version); - for &(dep, req, kind) in deps.iter() { - manifest.push_str(&format!(r#" - [{}dependencies.{}] - version = "{}" - "#, match kind { - "build" => "build-", - "dev" => "dev-", - _ => "" - }, dep, req)); - } - let p = project(name) - .file("Cargo.toml", &manifest) - .file("src/lib.rs", ""); - p.build(); - - let dst = mock_archive_dst(name, version); - fs::create_dir_all(dst.parent().unwrap()).unwrap(); - let f = File::create(&dst).unwrap(); - let a = Archive::new(GzEncoder::new(f, Default)); - a.append_file(&format!("{}-{}/Cargo.toml", name, version), - &mut File::open(&p.root().join("Cargo.toml")).unwrap()).unwrap(); - a.append_file(&format!("{}-{}/src/lib.rs", name, version), - &mut File::open(&p.root().join("src/lib.rs")).unwrap()).unwrap(); - a.finish().unwrap(); -} - -pub fn mock_archive_dst(name: &str, version: &str) -> PathBuf { - dl_path().join(name).join(version).join("download") -} - -pub fn mock_pkg(name: &str, version: &str, deps: &[(&str, &str, &str)]) { - mock_pkg_yank(name, version, deps, false) -} - -pub fn mock_pkg_yank(name: &str, version: &str, deps: &[(&str, &str, &str)], - yanked: bool) { - mock_archive(name, version, deps); - let 
mut c = Vec::new(); - File::open(&mock_archive_dst(name, version)).unwrap() - .read_to_end(&mut c).unwrap(); - let line = pkg(name, version, deps, &cksum(&c), yanked); - - let file = match name.len() { - 1 => format!("1/{}", name), - 2 => format!("2/{}", name), - 3 => format!("3/{}/{}", &name[..1], name), - _ => format!("{}/{}/{}", &name[0..2], &name[2..4], name), - }; - publish(&file, &line); -} - -pub fn publish(file: &str, line: &str) { - let repo = git2::Repository::open(&registry_path()).unwrap(); - let mut index = repo.index().unwrap(); - { - let dst = registry_path().join(file); - let mut prev = String::new(); - let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); - fs::create_dir_all(dst.parent().unwrap()).unwrap(); - File::create(&dst).unwrap() - .write_all((prev + line + "\n").as_bytes()).unwrap(); - } - index.add_path(Path::new(file)).unwrap(); - index.write().unwrap(); - let id = index.write_tree().unwrap(); - let tree = repo.find_tree(id).unwrap(); - let sig = repo.signature().unwrap(); - let parent = repo.refname_to_id("refs/heads/master").unwrap(); - let parent = repo.find_commit(parent).unwrap(); - repo.commit(Some("HEAD"), &sig, &sig, - "Another commit", &tree, - &[&parent]).unwrap(); -} - -#[allow(deprecated)] // connect => join in 1.3 -pub fn pkg(name: &str, vers: &str, deps: &[(&str, &str, &str)], cksum: &str, - yanked: bool) -> String { - let deps = deps.iter().map(|&(a, b, c)| dep(a, b, c)).collect::<Vec<String>>(); - format!("{{\"name\":\"{}\",\"vers\":\"{}\",\ - \"deps\":[{}],\"cksum\":\"{}\",\"features\":{{}},\ - \"yanked\":{}}}", - name, vers, deps.connect(","), cksum, yanked) -} - -pub fn dep(name: &str, req: &str, kind: &str) -> String { - format!("{{\"name\":\"{}\",\ - \"req\":\"{}\",\ - \"features\":[],\ - \"default_features\":false,\ - \"target\":null,\ - \"optional\":false,\ - \"kind\":\"{}\"}}", name, req, kind) -} - -pub fn cksum(s: &[u8]) -> String { - let mut sha = Sha256::new(); - sha.update(s); - sha.finish().to_hex() -} diff --git a/tests/test_bad_config.rs b/tests/test_bad_config.rs deleted file mode 100644 index 069783906a3..00000000000 --- a/tests/test_bad_config.rs +++ /dev/null @@ -1,260 +0,0 @@ -use support::{project, execs}; -use hamcrest::assert_that; - -fn setup() {} - -test!(bad1 { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "") - .file(".cargo/config", r#" - [target] - nonexistent-target = "foo" - "#); - assert_that(foo.cargo_process("build").arg("-v") - .arg("--target=nonexistent-target"), - execs().with_status(101).with_stderr("\ -expected table for configuration key `target.nonexistent-target`, but found string in [..]config -")); -}); - -test!(bad2 { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "") - .file(".cargo/config", r#" - [http] - proxy = 3.0 - "#); - assert_that(foo.cargo_process("publish").arg("-v"), - execs().with_status(101).with_stderr("\ -Couldn't load Cargo configuration - -Caused by: - failed to load TOML configuration from `[..]config` - -Caused by: - failed to parse key `http` - -Caused by: - failed to parse key `proxy` - -Caused by: - found TOML configuration value of unknown type `float` -")); -}); - -test!(bad3 { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "") - .file(".cargo/config", r#" - [http] - proxy = true - "#); - 
assert_that(foo.cargo_process("publish").arg("-v"), - execs().with_status(101).with_stderr("\ -invalid configuration for key `http.proxy` -expected a string, but found a boolean in [..]config -")); -}); - -test!(bad4 { - let foo = project("foo") - .file(".cargo/config", r#" - [cargo-new] - name = false - "#); - assert_that(foo.cargo_process("new").arg("-v").arg("foo"), - execs().with_status(101).with_stderr("\ -Failed to create project `foo` at `[..]` - -Caused by: - invalid configuration for key `cargo-new.name` -expected a string, but found a boolean in [..]config -")); -}); - -test!(bad5 { - let foo = project("foo") - .file(".cargo/config", r#" - foo = "" - "#) - .file("foo/.cargo/config", r#" - foo = 2 - "#); - foo.build(); - assert_that(foo.cargo("new") - .arg("-v").arg("foo").cwd(&foo.root().join("foo")), - execs().with_status(101).with_stderr("\ -Couldn't load Cargo configuration - -Caused by: - failed to merge key `foo` between files: - file 1: [..]foo[..]foo[..]config - file 2: [..]foo[..]config - -Caused by: - expected integer, but found string -")); -}); - -test!(bad_cargo_config_jobs { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "") - .file(".cargo/config", r#" - [build] - jobs = -1 - "#); - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(101).with_stderr("\ -build.jobs must be positive, but found -1 in [..] -")); -}); - -test!(default_cargo_config_jobs { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "") - .file(".cargo/config", r#" - [build] - jobs = 1 - "#); - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(0)); -}); - -test!(good_cargo_config_jobs { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "") - .file(".cargo/config", r#" - [build] - jobs = 4 - "#); - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(0)); -}); - -test!(invalid_global_config { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - foo = "0.1.0" - "#) - .file(".cargo/config", "4") - .file("src/lib.rs", ""); - - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(101).with_stderr("\ -Couldn't load Cargo configuration - -Caused by: - could not parse TOML configuration in `[..]config` - -Caused by: - could not parse input as TOML -[..]config:2:1 expected `=`, but found eof - -")); -}); - -test!(bad_cargo_lock { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("Cargo.lock", "") - .file("src/lib.rs", ""); - - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(101).with_stderr("\ -failed to parse lock file at: [..]Cargo.lock - -Caused by: - expected a section for the key `root` -")); -}); - -test!(bad_git_dependency { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - foo = { git = "file:.." } - "#) - .file("src/lib.rs", ""); - - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(101).with_stderr("\ -Unable to update file:/// - -Caused by: - failed to clone into: [..] 
- -Caused by: - [7] 'file:///' is not a valid local file URI -")); -}); - -test!(bad_crate_type { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [lib] - crate-type = ["bad_type", "rlib"] - "#) - .file("src/lib.rs", ""); - - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(0).with_stderr("\ -warning: crate-type \"bad_type\" was not one of lib|rlib|dylib|staticlib -")); -}); diff --git a/tests/test_bad_manifest_path.rs b/tests/test_bad_manifest_path.rs deleted file mode 100644 index 696dca438ad..00000000000 --- a/tests/test_bad_manifest_path.rs +++ /dev/null @@ -1,308 +0,0 @@ -use support::{project, execs, main_file, basic_bin_manifest}; -use hamcrest::{assert_that}; - -fn setup() {} - -fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process(command) - .arg("--manifest-path").arg(manifest_path_argument) - .cwd(p.root().parent().unwrap()), - execs().with_status(101) - .with_stderr("the manifest-path must be a path to a Cargo.toml file")); -} - -#[allow(deprecated)] // connect => join in 1.3 -fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { - let p = project("foo"); - let expected_path = manifest_path_argument - .split("/").collect::<Vec<_>>().connect("[..]"); - - assert_that(p.cargo_process(command) - .arg("--manifest-path").arg(manifest_path_argument) - .cwd(p.root().parent().unwrap()), - execs().with_status(101) - .with_stderr( - format!("manifest path `{}` does not exist", expected_path) - )); -} - -test!(bench_dir_containing_cargo_toml { - assert_not_a_cargo_toml("bench", "foo"); -}); - -test!(bench_dir_plus_file { - assert_not_a_cargo_toml("bench", "foo/bar"); -}); - -test!(bench_dir_plus_path { - assert_not_a_cargo_toml("bench", "foo/bar/baz"); -}); - -test!(bench_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml"); -}); - -test!(build_dir_containing_cargo_toml { - assert_not_a_cargo_toml("build", "foo"); -}); - -test!(build_dir_plus_file { - assert_not_a_cargo_toml("build", "foo/bar"); -}); - -test!(build_dir_plus_path { - assert_not_a_cargo_toml("build", "foo/bar/baz"); -}); - -test!(build_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml"); -}); - -test!(clean_dir_containing_cargo_toml { - assert_not_a_cargo_toml("clean", "foo"); -}); - -test!(clean_dir_plus_file { - assert_not_a_cargo_toml("clean", "foo/bar"); -}); - -test!(clean_dir_plus_path { - assert_not_a_cargo_toml("clean", "foo/bar/baz"); -}); - -test!(clean_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml"); -}); - -test!(doc_dir_containing_cargo_toml { - assert_not_a_cargo_toml("doc", "foo"); -}); - -test!(doc_dir_plus_file { - assert_not_a_cargo_toml("doc", "foo/bar"); -}); - -test!(doc_dir_plus_path { - assert_not_a_cargo_toml("doc", "foo/bar/baz"); -}); - -test!(doc_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml"); -}); - -test!(fetch_dir_containing_cargo_toml { - assert_not_a_cargo_toml("fetch", "foo"); -}); - -test!(fetch_dir_plus_file { - assert_not_a_cargo_toml("fetch", "foo/bar"); -}); - -test!(fetch_dir_plus_path { - assert_not_a_cargo_toml("fetch", "foo/bar/baz"); -}); - -test!(fetch_dir_to_nonexistent_cargo_toml { - 
assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml"); -}); - -test!(generate_lockfile_dir_containing_cargo_toml { - assert_not_a_cargo_toml("generate-lockfile", "foo"); -}); - -test!(generate_lockfile_dir_plus_file { - assert_not_a_cargo_toml("generate-lockfile", "foo/bar"); -}); - -test!(generate_lockfile_dir_plus_path { - assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz"); -}); - -test!(generate_lockfile_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml"); -}); - -test!(package_dir_containing_cargo_toml { - assert_not_a_cargo_toml("package", "foo"); -}); - -test!(package_dir_plus_file { - assert_not_a_cargo_toml("package", "foo/bar"); -}); - -test!(package_dir_plus_path { - assert_not_a_cargo_toml("package", "foo/bar/baz"); -}); - -test!(package_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml"); -}); - -test!(pkgid_dir_containing_cargo_toml { - assert_not_a_cargo_toml("pkgid", "foo"); -}); - -test!(pkgid_dir_plus_file { - assert_not_a_cargo_toml("pkgid", "foo/bar"); -}); - -test!(pkgid_dir_plus_path { - assert_not_a_cargo_toml("pkgid", "foo/bar/baz"); -}); - -test!(pkgid_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml"); -}); - -test!(publish_dir_containing_cargo_toml { - assert_not_a_cargo_toml("publish", "foo"); -}); - -test!(publish_dir_plus_file { - assert_not_a_cargo_toml("publish", "foo/bar"); -}); - -test!(publish_dir_plus_path { - assert_not_a_cargo_toml("publish", "foo/bar/baz"); -}); - -test!(publish_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml"); -}); - -test!(read_manifest_dir_containing_cargo_toml { - assert_not_a_cargo_toml("read-manifest", "foo"); -}); - -test!(read_manifest_dir_plus_file { - assert_not_a_cargo_toml("read-manifest", "foo/bar"); -}); - -test!(read_manifest_dir_plus_path { - assert_not_a_cargo_toml("read-manifest", "foo/bar/baz"); -}); - -test!(read_manifest_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml"); -}); - -test!(run_dir_containing_cargo_toml { - assert_not_a_cargo_toml("run", "foo"); -}); - -test!(run_dir_plus_file { - assert_not_a_cargo_toml("run", "foo/bar"); -}); - -test!(run_dir_plus_path { - assert_not_a_cargo_toml("run", "foo/bar/baz"); -}); - -test!(run_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml"); -}); - -test!(rustc_dir_containing_cargo_toml { - assert_not_a_cargo_toml("rustc", "foo"); -}); - -test!(rustc_dir_plus_file { - assert_not_a_cargo_toml("rustc", "foo/bar"); -}); - -test!(rustc_dir_plus_path { - assert_not_a_cargo_toml("rustc", "foo/bar/baz"); -}); - -test!(rustc_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml"); -}); - -test!(test_dir_containing_cargo_toml { - assert_not_a_cargo_toml("test", "foo"); -}); - -test!(test_dir_plus_file { - assert_not_a_cargo_toml("test", "foo/bar"); -}); - -test!(test_dir_plus_path { - assert_not_a_cargo_toml("test", "foo/bar/baz"); -}); - -test!(test_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml"); -}); - -test!(update_dir_containing_cargo_toml { - assert_not_a_cargo_toml("update", "foo"); -}); - -test!(update_dir_plus_file { - assert_not_a_cargo_toml("update", "foo/bar"); -}); - -test!(update_dir_plus_path { - assert_not_a_cargo_toml("update", 
"foo/bar/baz"); -}); - -test!(update_dir_to_nonexistent_cargo_toml { - assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml"); -}); - -test!(verify_project_dir_containing_cargo_toml { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("verify-project") - .arg("--manifest-path").arg("foo") - .cwd(p.root().parent().unwrap()), - execs().with_status(1) - .with_stdout("\ -{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ - ")); -}); - -test!(verify_project_dir_plus_file { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("verify-project") - .arg("--manifest-path").arg("foo/bar") - .cwd(p.root().parent().unwrap()), - execs().with_status(1) - .with_stdout("\ -{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ - ")); -}); - -test!(verify_project_dir_plus_path { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("verify-project") - .arg("--manifest-path").arg("foo/bar/baz") - .cwd(p.root().parent().unwrap()), - execs().with_status(1) - .with_stdout("\ -{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ - ")); -}); - -test!(verify_project_dir_to_nonexistent_cargo_toml { - let p = project("foo"); - assert_that(p.cargo_process("verify-project") - .arg("--manifest-path").arg("foo/bar/baz/Cargo.toml") - .cwd(p.root().parent().unwrap()), - execs().with_status(1) - .with_stdout("\ -{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\ - ")); -}); diff --git a/tests/test_cargo.rs b/tests/test_cargo.rs deleted file mode 100644 index 04f55c6f29e..00000000000 --- a/tests/test_cargo.rs +++ /dev/null @@ -1,128 +0,0 @@ -use std::env; -use std::ffi::OsString; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::path::{Path, PathBuf}; -use std::str; -use cargo::util::process; - -use support::paths; -use support::{execs, project, cargo_dir, mkdir_recursive, ProjectBuilder}; -use hamcrest::{assert_that}; - -fn setup() { -} - -/// Add an empty file with executable flags (and platform-dependent suffix). -/// TODO: move this to `ProjectBuilder` if other cases using this emerge. 
-fn fake_executable(proj: ProjectBuilder, dir: &Path, name: &str) -> ProjectBuilder { - let path = proj.root().join(dir).join(&format!("{}{}", name, - env::consts::EXE_SUFFIX)); - mkdir_recursive(path.parent().unwrap()).unwrap(); - File::create(&path).unwrap(); - make_executable(&path); - return proj; - - #[cfg(unix)] - fn make_executable(p: &Path) { - use std::os::unix::prelude::*; - - let mut perms = fs::metadata(p).unwrap().permissions(); - let mode = perms.mode(); - perms.set_mode(mode | 0o111); - fs::set_permissions(p, perms).unwrap(); - } - #[cfg(windows)] - fn make_executable(_: &Path) {} -} - -fn path() -> Vec<PathBuf> { - env::split_paths(&env::var_os("PATH").unwrap_or(OsString::new())).collect() -} - -test!(list_commands_looks_at_path { - let proj = project("list-non-overlapping"); - let proj = fake_executable(proj, &Path::new("path-test"), "cargo-1"); - let mut pr = process(&cargo_dir().join("cargo")).unwrap(); - pr.cwd(&proj.root()) - .env("HOME", &paths::home()); - - let mut path = path(); - path.push(proj.root().join("path-test")); - let path = env::join_paths(path.iter()).unwrap(); - let output = pr.arg("-v").arg("--list") - .env("PATH", &path); - let output = output.exec_with_output().unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - assert!(output.contains("\n    1\n"), "missing 1: {}", output); -}); - -test!(find_closest_biuld_to_build { - let mut pr = process(&cargo_dir().join("cargo")).unwrap(); - pr.arg("biuld").cwd(&paths::root()).env("HOME", &paths::home()); - - assert_that(pr, - execs().with_status(127) - .with_stderr("No such subcommand - -Did you mean `build`? - -")); -}); - -// if a subcommand is more than 3 edit distance away, we don't make a suggestion -test!(find_closest_dont_correct_nonsense { - let mut pr = process(&cargo_dir().join("cargo")).unwrap(); - pr.arg("asdf").cwd(&paths::root()).env("HOME", &paths::home()); - - assert_that(pr, - execs().with_status(127) - .with_stderr("No such subcommand -")); -}); - -test!(override_cargo_home { - let root = paths::root(); - let my_home = root.join("my_home"); - fs::create_dir(&my_home).unwrap(); - File::create(&my_home.join("config")).unwrap().write_all(br#" - [cargo-new] - name = "foo" - email = "bar" - git = false - "#).unwrap(); - - assert_that(process(&cargo_dir().join("cargo")).unwrap() - .arg("new").arg("foo") - .cwd(&paths::root()) - .env("USER", "foo") - .env("HOME", &paths::home()) - .env("CARGO_HOME", &my_home), - execs().with_status(0)); - - let toml = paths::root().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"authors = ["foo <bar>"]"#)); -}); - -test!(cargo_help { - assert_that(process(&cargo_dir().join("cargo")).unwrap(), - execs().with_status(0)); - assert_that(process(&cargo_dir().join("cargo")).unwrap().arg("help"), - execs().with_status(0)); - assert_that(process(&cargo_dir().join("cargo")).unwrap().arg("-h"), - execs().with_status(0)); - assert_that(process(&cargo_dir().join("cargo")).unwrap() - .arg("help").arg("build"), - execs().with_status(0)); - assert_that(process(&cargo_dir().join("cargo")).unwrap() - .arg("build").arg("-h"), - execs().with_status(0)); - assert_that(process(&cargo_dir().join("cargo")).unwrap() - .arg("help").arg("-h"), - execs().with_status(0)); - assert_that(process(&cargo_dir().join("cargo")).unwrap() - .arg("help").arg("help"), - execs().with_status(0)); -}); diff --git a/tests/test_cargo_bench.rs b/tests/test_cargo_bench.rs deleted file mode 100644 index 
9d0123ec6c6..00000000000 --- a/tests/test_cargo_bench.rs +++ /dev/null @@ -1,924 +0,0 @@ -use std::str; - -use support::{project, execs, basic_bin_manifest, basic_lib_manifest}; -use support::{COMPILING, FRESH, RUNNING}; -use support::paths::CargoPathExt; -use hamcrest::{assert_that, existing_file}; -use cargo::util::process; - -fn setup() {} - -test!(cargo_bench_simple { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - #![feature(test)] - extern crate test; - - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[bench] - fn bench_hello(_b: &mut test::Bencher) { - assert_eq!(hello(), "hello") - }"#); - - assert_that(p.cargo_process("build"), execs()); - assert_that(&p.bin("foo"), existing_file()); - - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("hello\n")); - - assert_that(p.cargo("bench"), - execs().with_stdout(&format!("\ -{} foo v0.5.0 ({}) -{} target[..]release[..]foo-[..] - -running 1 test -test bench_hello ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - COMPILING, p.url(), - RUNNING))); -}); - -test!(bench_tarname { - if !::is_nightly() { return } - - let prj = project("foo") - .file("Cargo.toml" , r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("benches/bin1.rs", r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { }"#) - .file("benches/bin2.rs", r#" - #![feature(test)] - extern crate test; - #[bench] fn run2(_ben: &mut test::Bencher) { }"#); - - let expected_stdout = format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]release[..]bin2[..] - -running 1 test -test run2 ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, - running = RUNNING, - dir = prj.url()); - - assert_that(prj.cargo_process("bench").arg("--bench").arg("bin2"), - execs().with_status(0).with_stdout(expected_stdout)); -}); - -test!(cargo_bench_verbose { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - #![feature(test)] - extern crate test; - fn main() {} - #[bench] fn bench_hello(_b: &mut test::Bencher) {} - "#); - - assert_that(p.cargo_process("bench").arg("-v").arg("hello"), - execs().with_stdout(&format!("\ -{compiling} foo v0.5.0 ({url}) -{running} `rustc src[..]foo.rs [..]` -{running} `[..]target[..]release[..]foo-[..] hello --bench` - -running 1 test -test bench_hello ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, url = p.url(), running = RUNNING))); -}); - -test!(many_similar_names { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - #![feature(test)] - extern crate test; - pub fn foo() {} - #[bench] fn lib_bench(_b: &mut test::Bencher) {} - ") - .file("src/main.rs", " - #![feature(test)] - extern crate foo; - extern crate test; - fn main() {} - #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() } - ") - .file("benches/foo.rs", r#" - #![feature(test)] - extern crate foo; - extern crate test; - #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() } - "#); - - let output = p.cargo_process("bench").exec_with_output().unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output); - assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output); - assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output); -}); - -test!(cargo_bench_failing_test { - if !::is_nightly() { return } - if !::can_panic() { return } - - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - #![feature(test)] - extern crate test; - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[bench] - fn bench_hello(_b: &mut test::Bencher) { - assert_eq!(hello(), "nope") - }"#); - - assert_that(p.cargo_process("build"), execs()); - assert_that(&p.bin("foo"), existing_file()); - - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("hello\n")); - - assert_that(p.cargo("bench"), - execs().with_stdout(&format!("\ -{} foo v0.5.0 ({}) -{} target[..]release[..]foo-[..] - -running 1 test -test bench_hello ... ", - COMPILING, p.url(), RUNNING)) - .with_stderr("\ -thread '
<main>' panicked at 'assertion failed: \ - `(left == right)` (left: \ - `\"hello\"`, right: `\"nope\"`)', src[..]foo.rs:14 - -") - .with_status(101)); -}); - -test!(bench_with_lib_dep { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "baz" - path = "src/main.rs" - "#) - .file("src/lib.rs", r#" - #![feature(test)] - extern crate test; - /// - /// ```rust - /// extern crate foo; - /// fn main() { - /// println!("{}", foo::foo()); - /// } - /// ``` - /// - pub fn foo(){} - #[bench] fn lib_bench(_b: &mut test::Bencher) {} - "#) - .file("src/main.rs", " - #![feature(test)] - extern crate foo; - extern crate test; - - fn main() {} - - #[bench] - fn bin_bench(_b: &mut test::Bencher) {} - "); - - assert_that(p.cargo_process("bench"), - execs().with_stdout(&format!("\ -{} foo v0.0.1 ({}) -{running} target[..]release[..]baz-[..] - -running 1 test -test bin_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} target[..]release[..]foo-[..] - -running 1 test -test lib_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - COMPILING, p.url(), running = RUNNING))) -}); - -test!(bench_with_deep_lib_dep { - if !::is_nightly() { return } - - let p = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies.foo] - path = "../foo" - "#) - .file("src/lib.rs", " - #![feature(test)] - extern crate foo; - extern crate test; - #[bench] - fn bar_bench(_b: &mut test::Bencher) { - foo::foo(); - } - "); - let p2 = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - #![feature(test)] - extern crate test; - - pub fn foo() {} - - #[bench] - fn foo_bench(_b: &mut test::Bencher) {} - "); - - p2.build(); - assert_that(p.cargo_process("bench"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{compiling} bar v0.0.1 ({dir}) -{running} target[..] - -running 1 test -test bar_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); -}); - -test!(external_bench_explicit { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[bench]] - name = "bench" - path = "src/bench.rs" - "#) - .file("src/lib.rs", r#" - #![feature(test)] - extern crate test; - pub fn get_hello() -> &'static str { "Hello" } - - #[bench] - fn internal_bench(_b: &mut test::Bencher) {} - "#) - .file("src/bench.rs", r#" - #![feature(test)] - extern crate foo; - extern crate test; - - #[bench] - fn external_bench(_b: &mut test::Bencher) {} - "#); - - assert_that(p.cargo_process("bench"), - execs().with_stdout(&format!("\ -{} foo v0.0.1 ({}) -{running} target[..]release[..]bench-[..] - -running 1 test -test external_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} target[..]release[..]foo-[..] - -running 1 test -test internal_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured - -", - COMPILING, p.url(), running = RUNNING))) -}); - -test!(external_bench_implicit { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - #![feature(test)] - extern crate test; - - pub fn get_hello() -> &'static str { "Hello" } - - #[bench] - fn internal_bench(_b: &mut test::Bencher) {} - "#) - .file("benches/external.rs", r#" - #![feature(test)] - extern crate foo; - extern crate test; - - #[bench] - fn external_bench(_b: &mut test::Bencher) {} - "#); - - assert_that(p.cargo_process("bench"), - execs().with_stdout(&format!("\ -{} foo v0.0.1 ({}) -{running} target[..]release[..]external-[..] - -running 1 test -test external_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} target[..]release[..]foo-[..] - -running 1 test -test internal_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - COMPILING, p.url(), running = RUNNING))) -}); - -test!(dont_run_examples { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - "#) - .file("examples/dont-run-me-i-will-fail.rs", r#" - fn main() { panic!("Examples should not be run by 'cargo test'"); } - "#); - assert_that(p.cargo_process("bench"), - execs().with_status(0)); -}); - -test!(pass_through_command_line { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - #![feature(test)] - extern crate test; - - #[bench] fn foo(_b: &mut test::Bencher) {} - #[bench] fn bar(_b: &mut test::Bencher) {} - "); - - assert_that(p.cargo_process("bench").arg("bar"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]release[..]foo-[..] - -running 1 test -test bar ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); - - assert_that(p.cargo("bench").arg("foo"), - execs().with_status(0) - .with_stdout(&format!("\ -{running} target[..]release[..]foo-[..] - -running 1 test -test foo ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured - -", running = RUNNING))); -}); - -// Regression test for running cargo-bench twice with -// tests in an rlib -test!(cargo_bench_twice { - if !::is_nightly() { return } - - let p = project("test_twice") - .file("Cargo.toml", &basic_lib_manifest("test_twice")) - .file("src/test_twice.rs", r#" - #![crate_type = "rlib"] - #![feature(test)] - - extern crate test; - - #[bench] - fn dummy_bench(b: &mut test::Bencher) { } - "#); - - p.cargo_process("build"); - - for _ in 0..2 { - assert_that(p.cargo("bench"), - execs().with_status(0)); - } -}); - -test!(lib_bin_same_name { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - [[bin]] - name = "foo" - "#) - .file("src/lib.rs", " - #![feature(test)] - extern crate test; - #[bench] fn lib_bench(_b: &mut test::Bencher) {} - ") - .file("src/main.rs", " - #![feature(test)] - extern crate foo; - extern crate test; - - #[bench] - fn bin_bench(_b: &mut test::Bencher) {} - "); - - assert_that(p.cargo_process("bench"), - execs().with_stdout(&format!("\ -{} foo v0.0.1 ({}) -{running} target[..]release[..]foo-[..] - -running 1 test -test [..] ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} target[..]release[..]foo-[..] - -running 1 test -test [..] ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - COMPILING, p.url(), running = RUNNING))) -}); - -test!(lib_with_standard_name { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - #![feature(test)] - extern crate test; - - /// ``` - /// syntax::foo(); - /// ``` - pub fn foo() {} - - #[bench] - fn foo_bench(_b: &mut test::Bencher) {} - ") - .file("benches/bench.rs", " - #![feature(test)] - extern crate syntax; - extern crate test; - - #[bench] - fn bench(_b: &mut test::Bencher) { syntax::foo() } - "); - - assert_that(p.cargo_process("bench"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} syntax v0.0.1 ({dir}) -{running} target[..]release[..]bench-[..] - -running 1 test -test bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} target[..]release[..]syntax-[..] - -running 1 test -test foo_bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); -}); - -test!(lib_with_standard_name2 { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - name = "syntax" - bench = false - doctest = false - "#) - .file("src/lib.rs", " - pub fn foo() {} - ") - .file("src/main.rs", " - #![feature(test)] - extern crate syntax; - extern crate test; - - fn main() {} - - #[bench] - fn bench(_b: &mut test::Bencher) { syntax::foo() } - "); - - assert_that(p.cargo_process("bench"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} syntax v0.0.1 ({dir}) -{running} target[..]release[..]syntax-[..] - -running 1 test -test bench ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); -}); - -test!(bench_dylib { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate_type = ["dylib"] - - [dependencies.bar] - path = "bar" - "#) - .file("src/lib.rs", r#" - #![feature(test)] - extern crate bar as the_bar; - extern crate test; - - pub fn bar() { the_bar::baz(); } - - #[bench] - fn foo(_b: &mut test::Bencher) {} - "#) - .file("benches/bench.rs", r#" - #![feature(test)] - extern crate foo as the_foo; - extern crate test; - - #[bench] - fn foo(_b: &mut test::Bencher) { the_foo::bar(); } - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - crate_type = ["dylib"] - "#) - .file("bar/src/lib.rs", " - pub fn baz() {} - "); - - assert_that(p.cargo_process("bench").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} bar v0.0.1 ({dir}) -{running} [..] -C opt-level=3 [..] -{compiling} foo v0.0.1 ({dir}) -{running} [..] -C opt-level=3 [..] -{running} [..] -C opt-level=3 [..] -{running} [..] -C opt-level=3 [..] -{running} [..]target[..]release[..]bench-[..] - -running 1 test -test foo ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} [..]target[..]release[..]foo-[..] - -running 1 test -test foo ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); - p.root().move_into_the_past().unwrap(); - assert_that(p.cargo("bench").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{fresh} bar v0.0.1 ({dir}) -{fresh} foo v0.0.1 ({dir}) -{running} [..]target[..]release[..]bench-[..] - -running 1 test -test foo ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} [..]target[..]release[..]foo-[..] - -running 1 test -test foo ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - fresh = FRESH, running = RUNNING, - dir = p.url()))); -}); - -test!(bench_twice_with_build_cmd { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", " - #![feature(test)] - extern crate test; - #[bench] - fn foo(_b: &mut test::Bencher) {} - "); - - assert_that(p.cargo_process("bench"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]release[..]foo-[..] - -running 1 test -test foo ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); - - assert_that(p.cargo("bench"), - execs().with_status(0) - .with_stdout(&format!("\ -{running} target[..]release[..]foo-[..] - -running 1 test -test foo ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured - -", - running = RUNNING))); -}); - -test!(bench_with_examples { - if !::is_nightly() { return } - - let p = project("testbench") - .file("Cargo.toml", r#" - [package] - name = "testbench" - version = "6.6.6" - authors = [] - - [[example]] - name = "teste1" - - [[bench]] - name = "testb1" - "#) - .file("src/lib.rs", r#" - #![feature(test)] - extern crate test; - use test::Bencher; - - pub fn f1() { - println!("f1"); - } - - pub fn f2() {} - - #[bench] - fn bench_bench1(_b: &mut Bencher) { - f2(); - } - "#) - .file("benches/testb1.rs", " - #![feature(test)] - extern crate testbench; - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_bench2(_b: &mut Bencher) { - testbench::f2(); - } - ") - .file("examples/teste1.rs", r#" - extern crate testbench; - - fn main() { - println!("example1"); - testbench::f1(); - } - "#); - - assert_that(p.cargo_process("bench").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} testbench v6.6.6 ({url}) -{running} `rustc src[..]lib.rs --crate-name testbench --crate-type lib [..]` -{running} `rustc src[..]lib.rs --crate-name testbench --crate-type lib [..]` -{running} `rustc benches[..]testb1.rs --crate-name testb1 --crate-type bin \ - [..] --test [..]` -{running} `{dir}[..]target[..]release[..]testb1-[..] --bench` - -running 1 test -test bench_bench2 ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -{running} `{dir}[..]target[..]release[..]testbench-[..] --bench` - -running 1 test -test bench_bench1 ... bench: [..] 0 ns/iter (+/- 0) - -test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured - -", - compiling = COMPILING, - running = RUNNING, - dir = p.root().display(), - url = p.url()))); -}); - -test!(test_a_bench { - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - authors = [] - version = "0.1.0" - - [lib] - name = "foo" - test = false - doctest = false - - [[bench]] - name = "b" - test = true - "#) - .file("src/lib.rs", "") - .file("benches/b.rs", r#" - #[test] - fn foo() {} - "#); - - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.1.0 ([..]) -{running} target[..]debug[..]b-[..] - -running 1 test -test foo ... ok - -test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING))); -}); diff --git a/tests/test_cargo_build_auth.rs b/tests/test_cargo_build_auth.rs deleted file mode 100644 index 0416c477cd8..00000000000 --- a/tests/test_cargo_build_auth.rs +++ /dev/null @@ -1,203 +0,0 @@ -use std::collections::HashSet; -use std::io::prelude::*; -use std::net::TcpListener; -use std::thread; - -use bufstream::BufStream; -use git2; - -use support::{project, execs, UPDATING}; -use support::paths; -use hamcrest::assert_that; - -fn setup() { -} - -// Test that HTTP auth is offered from `credential.helper` -test!(http_auth_offered { - let a = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = a.local_addr().unwrap(); - - fn headers(rdr: &mut BufRead) -> HashSet<String> { - let valid = ["GET", "Authorization", "Accept", "User-Agent"]; - rdr.lines().map(|s| s.unwrap()) - .take_while(|s| s.len() > 2) - .map(|s| s.trim().to_string()) - .filter(|s| { - valid.iter().any(|prefix| s.starts_with(*prefix)) - }) - .collect() - } - - let t = thread::spawn(move|| { - let mut s = BufStream::new(a.accept().unwrap().0); - let req = headers(&mut s); - s.write_all(b"\ - HTTP/1.1 401 Unauthorized\r\n\ - WWW-Authenticate: Basic realm=\"wheee\"\r\n\ - \r\n\ - ").unwrap(); - assert_eq!(req, vec![ - "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", - "Accept: */*", - "User-Agent: git/1.0 (libgit2 0.22.0)", - ].into_iter().map(|s| s.to_string()).collect()); - drop(s); - - let mut s = BufStream::new(a.accept().unwrap().0); - let req = headers(&mut s); - s.write_all(b"\ - HTTP/1.1 401 Unauthorized\r\n\ - WWW-Authenticate: Basic realm=\"wheee\"\r\n\ - \r\n\ - ").unwrap(); - assert_eq!(req, vec![ - "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", - "Authorization: Basic Zm9vOmJhcg==", - "Accept: */*", - "User-Agent: git/1.0 (libgit2 0.22.0)", - ].into_iter().map(|s| s.to_string()).collect()); - }); - - let script = project("script") - .file("Cargo.toml", r#" - [project] - name = "script" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() { - println!("username=foo"); - println!("password=bar"); - } - "#); - assert_that(script.cargo_process("build").arg("-v"), - execs().with_status(0)); - let script = script.bin("script"); - - let config = paths::home().join(".gitconfig"); - let mut config = git2::Config::open(&config).unwrap(); - config.set_str("credential.helper", - &script.display().to_string()).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "http://127.0.0.1:{}/foo/bar" - "#, addr.port())) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stdout(&format!("\ -{updating} git repository `http://{addr}/foo/bar` -", - updating = UPDATING, - addr = addr, - )) - .with_stderr(&format!("\ -Unable to update http://{addr}/foo/bar - -Caused by: - failed to clone into: [..] - -Caused by: - failed to authenticate when downloading repository - -To learn more, run the command again with --verbose. -", - addr = addr))); - - t.join().ok().unwrap(); -}); - -// Boy, sure would be nice to have a TLS implementation in rust! 
-test!(https_something_happens { - let a = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = a.local_addr().unwrap(); - let t = thread::spawn(move|| { - drop(a.accept().unwrap()); - }); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "https://127.0.0.1:{}/foo/bar" - "#, addr.port())) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101).with_stdout(&format!("\ -{updating} git repository `https://{addr}/foo/bar` -", - updating = UPDATING, - addr = addr, - )) - .with_stderr(&format!("\ -Unable to update https://{addr}/foo/bar - -Caused by: - failed to clone into: [..] - -Caused by: - {errmsg} -", - addr = addr, - errmsg = if cfg!(windows) { - "[[..]] failed to send request: [..]\n" - } else { - "[[..]] SSL error: [..]" - }))); - - t.join().ok().unwrap(); -}); - -// Boy, sure would be nice to have an SSH implementation in rust! -test!(ssh_something_happens { - let a = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = a.local_addr().unwrap(); - let t = thread::spawn(move|| { - drop(a.accept().unwrap()); - }); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "ssh://127.0.0.1:{}/foo/bar" - "#, addr.port())) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101).with_stdout(&format!("\ -{updating} git repository `ssh://{addr}/foo/bar` -", - updating = UPDATING, - addr = addr, - )) - .with_stderr(&format!("\ -Unable to update ssh://{addr}/foo/bar - -Caused by: - failed to clone into: [..] - -Caused by: - [[..]] Failed to start SSH session: Failed getting banner -", - addr = addr))); - t.join().ok().unwrap(); -}); diff --git a/tests/test_cargo_build_lib.rs b/tests/test_cargo_build_lib.rs deleted file mode 100644 index 476d2f14bdf..00000000000 --- a/tests/test_cargo_build_lib.rs +++ /dev/null @@ -1,84 +0,0 @@ -use std::path::MAIN_SEPARATOR as SEP; -use support::{basic_bin_manifest, execs, project, ProjectBuilder}; -use support::{COMPILING, RUNNING}; -use hamcrest::{assert_that}; - -fn setup() { -} - -fn verbose_output_for_lib(p: &ProjectBuilder) -> String { - format!("\ -{compiling} {name} v{version} ({url}) -{running} `rustc src{sep}lib.rs --crate-name {name} --crate-type lib -g \ - --out-dir {dir}{sep}target{sep}debug \ - --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}debug \ - -L dependency={dir}{sep}target{sep}debug{sep}deps` -", - running = RUNNING, compiling = COMPILING, sep = SEP, - dir = p.root().display(), url = p.url(), - name = "foo", version = "0.0.1") -} - -test!(build_lib_only { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - - name = "foo" - version = "0.0.1" - authors = ["wycats@example.com"] - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("build").arg("--lib").arg("-v"), - execs() - .with_status(0) - .with_stdout(verbose_output_for_lib(&p))); -}); - - -test!(build_with_no_lib { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", r#" - fn main() {} - "#); - - assert_that(p.cargo_process("build").arg("--lib"), - execs().with_status(101) - .with_stderr("no library targets found")); -}); - -test!(build_with_relative_cargo_home_path { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - - name = "foo" - 
version = "0.0.1" - authors = ["wycats@example.com"] - - [dependencies] - - "test-dependency" = { path = "src/test_dependency" } - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("src/test_dependency/src/lib.rs", r#" "#) - .file("src/test_dependency/Cargo.toml", r#" - [package] - - name = "test-dependency" - version = "0.0.1" - authors = ["wycats@example.com"] - "#); - - assert_that(p.cargo_process("build").env("CARGO_HOME", "./cargo_home/"), - execs() - .with_status(0)); -}); diff --git a/tests/test_cargo_clean.rs b/tests/test_cargo_clean.rs deleted file mode 100644 index 9ccd8f7314f..00000000000 --- a/tests/test_cargo_clean.rs +++ /dev/null @@ -1,32 +0,0 @@ -use support::{project, execs, main_file, basic_bin_manifest}; -use hamcrest::{assert_that, existing_dir, is_not}; - -fn setup() { -} - -test!(cargo_clean_simple { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(&p.build_dir(), existing_dir()); - - assert_that(p.cargo("clean"), - execs().with_status(0)); - assert_that(&p.build_dir(), is_not(existing_dir())); -}); - -test!(different_dir { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .file("src/bar/a.rs", ""); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(&p.build_dir(), existing_dir()); - - assert_that(p.cargo("clean").cwd(&p.root().join("src")), - execs().with_status(0).with_stdout("")); - assert_that(&p.build_dir(), is_not(existing_dir())); -}); diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs deleted file mode 100644 index ca147db349b..00000000000 --- a/tests/test_cargo_compile.rs +++ /dev/null @@ -1,1886 +0,0 @@ -use std::env; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::thread; -use tempdir::TempDir; - -use support::{project, execs, main_file, basic_bin_manifest}; -use support::{COMPILING, RUNNING, ProjectBuilder}; -use hamcrest::{assert_that, existing_file, is_not}; -use support::paths::CargoPathExt; -use cargo::util::process; - -fn setup() { -} - -test!(cargo_compile_simple { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("build"), execs()); - assert_that(&p.bin("foo"), existing_file()); - - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("i am foo\n")); -}); - -test!(cargo_compile_manifest_path { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("build") - .arg("--manifest-path").arg("foo/Cargo.toml") - .cwd(p.root().parent().unwrap()), - execs().with_status(0)); - assert_that(&p.bin("foo"), existing_file()); -}); - -test!(cargo_compile_with_invalid_manifest { - let p = project("foo") - .file("Cargo.toml", ""); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - No `package` or `project` section found. 
-")) -}); - -test!(cargo_compile_with_invalid_manifest2 { - let p = project("foo") - .file("Cargo.toml", r" - [project] - foo = bar - "); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - could not parse input as TOML -Cargo.toml:3:19-3:20 expected a value - -")) -}); - -test!(cargo_compile_with_invalid_manifest3 { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/Cargo.toml", "a = bar"); - - assert_that(p.cargo_process("build").arg("--manifest-path") - .arg("src/Cargo.toml"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - could not parse input as TOML\n\ -src[..]Cargo.toml:1:5-1:6 expected a value\n\n")) -}); - -test!(cargo_compile_with_invalid_version { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - authors = [] - version = "1.0" - "#); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - cannot parse '1.0' as a semver for the key `project.version` -")) - -}); - -test!(cargo_compile_with_invalid_package_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "" - authors = [] - version = "0.0.0" - "#); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - package name cannot be an empty string. -")) -}); - -test!(cargo_compile_with_invalid_bin_target_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "" - "#); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - binary target names cannot be empty. -")) -}); - -test!(cargo_compile_with_forbidden_bin_target_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "build" - "#); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - the binary target name `build` is forbidden -")) -}); - -test!(cargo_compile_with_invalid_lib_target_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [lib] - name = "" - "#); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]` - -Caused by: - library target names cannot be empty. -")) -}); - -test!(cargo_compile_without_manifest { - let tmpdir = TempDir::new("cargo").unwrap(); - let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()); - - assert_that(p.cargo_process("build"), - execs().with_status(101) - .with_stderr("\ -Could not find `Cargo.toml` in `[..]` or any parent directory -")); -}); - -test!(cargo_compile_with_invalid_code { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", "invalid rust code!"); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr("\ -src[..]foo.rs:1:1: 1:8 error: expected item[..]found `invalid` -src[..]foo.rs:1 invalid rust code! - ^~~~~~~ -Could not compile `foo`. 
- -To learn more, run the command again with --verbose.\n")); - assert_that(&p.root().join("Cargo.lock"), existing_file()); -}); - -test!(cargo_compile_with_invalid_code_in_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - [dependencies.baz] - path = "../baz" - "#) - .file("src/main.rs", "invalid rust code!"); - let bar = project("bar") - .file("Cargo.toml", &basic_bin_manifest("bar")) - .file("src/lib.rs", "invalid rust code!"); - let baz = project("baz") - .file("Cargo.toml", &basic_bin_manifest("baz")) - .file("src/lib.rs", "invalid rust code!"); - bar.build(); - baz.build(); - assert_that(p.cargo_process("build"), execs().with_status(101)); -}); - -test!(cargo_compile_with_warnings_in_the_root_package { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", "fn main() {} fn dead() {}"); - - assert_that(p.cargo_process("build"), - execs() - .with_stderr("\ -src[..]foo.rs:1:14: 1:26 warning: function is never used: `dead`, \ - #[warn(dead_code)] on by default -src[..]foo.rs:1 fn main() {} fn dead() {} -[..] ^~~~~~~~~~~~ -")); -}); - -test!(cargo_compile_with_warnings_in_a_dep_package { - let mut p = project("foo"); - - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "bar" - "#) - .file("bar/src/bar.rs", r#" - pub fn gimme() -> &'static str { - "test passed" - } - - fn dead() {} - "#); - - assert_that(p.cargo_process("build"), - execs() - .with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url())) - .with_stderr("\ -[..]warning: function is never used: `dead`[..] 
-[..]fn dead() {} -[..]^~~~~~~~~~~~ -")); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that( - process(&p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n")); -}); - -test!(cargo_compile_with_nested_deps_inferred { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = 'bar' - - [[bin]] - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - "#) - .file("bar/src/lib.rs", r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#) - .file("baz/Cargo.toml", r#" - [project] - - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("baz/src/lib.rs", r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#); - - p.cargo_process("build") - .exec_with_output() - .unwrap(); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that( - process(&p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n")); -}); - -test!(cargo_compile_with_nested_deps_correct_bin { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - - [[bin]] - name = "foo" - "#) - .file("src/main.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - "#) - .file("bar/src/lib.rs", r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#) - .file("baz/Cargo.toml", r#" - [project] - - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("baz/src/lib.rs", r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#); - - p.cargo_process("build") - .exec_with_output() - .unwrap(); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that( - process(&p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n")); -}); - -test!(cargo_compile_with_nested_deps_shorthand { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - - [lib] - - name = "bar" - "#) - .file("bar/src/bar.rs", r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#) - .file("baz/Cargo.toml", r#" - [project] - - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "baz" - "#) - .file("baz/src/baz.rs", r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#); - - p.cargo_process("build") - .exec_with_output() - .unwrap(); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that( - process(&p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n")); -}); - -test!(cargo_compile_with_nested_deps_longhand { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - 
version = "0.5.0" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - version = "0.5.0" - - [lib] - - name = "bar" - "#) - .file("bar/src/bar.rs", r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#) - .file("baz/Cargo.toml", r#" - [project] - - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "baz" - "#) - .file("baz/src/baz.rs", r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#); - - assert_that(p.cargo_process("build"), execs()); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n")); -}); - -// Check that Cargo gives a sensible error if a dependency can't be found -// because of a name mismatch. -test!(cargo_compile_with_dep_name_mismatch { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - - name = "foo" - version = "0.0.1" - authors = ["wycats@example.com"] - - [[bin]] - - name = "foo" - - [dependencies.notquitebar] - - path = "bar" - "#) - .file("src/foo.rs", &main_file(r#""i am foo""#, &["bar"])) - .file("bar/Cargo.toml", &basic_bin_manifest("bar")) - .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[])); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr(&format!( -r#"no matching package named `notquitebar` found (required by `foo`) -location searched: {proj_dir} -version required: * -"#, proj_dir = p.url()))); -}); - -test!(compile_path_dep_then_change_version { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - - File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" - [package] - name = "bar" - version = "0.0.2" - authors = [] - "#).unwrap(); - - assert_that(p.cargo("build"), - execs().with_status(101).with_stderr("\ -no matching package named `bar` found (required by `foo`) -location searched: [..] 
-version required: = 0.0.1 -versions found: 0.0.2 -consider running `cargo update` to update a path dependency's locked version -")); -}); - -// test!(compiling_project_with_invalid_manifest) - -test!(crate_version_env_vars { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.1-alpha.1" - authors = ["wycats@example.com"] - "#) - .file("src/main.rs", r#" - extern crate foo; - - static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR"); - static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR"); - static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH"); - static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE"); - static VERSION: &'static str = env!("CARGO_PKG_VERSION"); - static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR"); - - fn main() { - let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR, - VERSION_MINOR, VERSION_PATCH, VERSION_PRE, - CARGO_MANIFEST_DIR); - assert_eq!(s, foo::version()); - println!("{}", s); - assert_eq!(s, VERSION); - } - "#) - .file("src/lib.rs", r#" - pub fn version() -> String { - format!("{}-{}-{} @ {} in {}", - env!("CARGO_PKG_VERSION_MAJOR"), - env!("CARGO_PKG_VERSION_MINOR"), - env!("CARGO_PKG_VERSION_PATCH"), - env!("CARGO_PKG_VERSION_PRE"), - env!("CARGO_MANIFEST_DIR")) - } - "#); - - println!("build"); - assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); - - println!("bin"); - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout(&format!("0-5-1 @ alpha.1 in {}\n", - p.root().display()))); - - println!("test"); - assert_that(p.cargo("test").arg("-v"), - execs().with_status(0)); -}); - -// this is testing that src/<pkg-name>.rs still works (for now) -test!(many_crate_types_old_style_lib_location { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "foo" - crate_type = ["rlib", "dylib"] - "#) - .file("src/foo.rs", r#" - pub fn foo() {} - "#); - assert_that(p.cargo_process("build"), execs().with_status(0)); - - assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); - let fname = format!("{}foo{}", env::consts::DLL_PREFIX, - env::consts::DLL_SUFFIX); - assert_that(&p.root().join("target/debug").join(&fname), existing_file()); -}); - -test!(many_crate_types_correct { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "foo" - crate_type = ["rlib", "dylib"] - "#) - .file("src/lib.rs", r#" - pub fn foo() {} - "#); - assert_that(p.cargo_process("build"), - execs().with_status(0)); - - assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); - let fname = format!("{}foo{}", env::consts::DLL_PREFIX, - env::consts::DLL_SUFFIX); - assert_that(&p.root().join("target/debug").join(&fname), existing_file()); -}); - -test!(unused_keys { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - bulid = "foo" - - [lib] - - name = "foo" - "#) - .file("src/foo.rs", r#" - pub fn foo() {} - "#); - assert_that(p.cargo_process("build"), - execs().with_status(0) - .with_stderr("unused manifest key: project.bulid\n")); - - let mut p = project("bar"); - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "foo" - build 
= "foo" - "#) - .file("src/foo.rs", r#" - pub fn foo() {} - "#); - assert_that(p.cargo_process("build"), - execs().with_status(0) - .with_stderr("unused manifest key: lib.build\n")); -}); - -test!(self_dependency { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [dependencies.test] - - path = "." - - [lib] - - name = "test" - "#) - .file("src/test.rs", "fn main() {}"); - assert_that(p.cargo_process("build"), - execs().with_status(101) - .with_stderr("\ -cyclic package dependency: package `test v0.0.0 ([..])` depends on itself -")); -}); - -test!(ignore_broken_symlinks { - // windows and symlinks don't currently agree that well - if cfg!(windows) { return } - - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .symlink("Notafile", "bar"); - - assert_that(p.cargo_process("build"), execs()); - assert_that(&p.bin("foo"), existing_file()); - - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("i am foo\n")); -}); - -test!(missing_lib_and_bin { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - "#); - assert_that(p.cargo_process("build"), - execs().with_status(101) - .with_stderr("\ -failed to parse manifest at `[..]Cargo.toml` - -Caused by: - no targets specified in the manifest - either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n")); -}); - -test!(lto_build { - // FIXME: currently this hits a linker bug on 32-bit MSVC - if cfg!(all(target_env = "msvc", target_pointer_width = "32")) { - return - } - - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.release] - lto = true - "#) - .file("src/main.rs", "fn main() {}"); - assert_that(p.cargo_process("build").arg("-v").arg("--release"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} test v0.0.0 ({url}) -{running} `rustc src[..]main.rs --crate-name test --crate-type bin \ - -C opt-level=3 \ - -C lto \ - --out-dir {dir}[..]target[..]release \ - --emit=dep-info,link \ - -L dependency={dir}[..]target[..]release \ - -L dependency={dir}[..]target[..]release[..]deps` -", -running = RUNNING, compiling = COMPILING, -dir = p.root().display(), -url = p.url(), -))); -}); - -test!(verbose_build { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", ""); - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} test v0.0.0 ({url}) -{running} `rustc src[..]lib.rs --crate-name test --crate-type lib -g \ - --out-dir {dir}[..]target[..]debug \ - --emit=dep-info,link \ - -L dependency={dir}[..]target[..]debug \ - -L dependency={dir}[..]target[..]debug[..]deps` -", -running = RUNNING, compiling = COMPILING, -dir = p.root().display(), -url = p.url(), -))); -}); - -test!(verbose_release_build { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", ""); - assert_that(p.cargo_process("build").arg("-v").arg("--release"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} test v0.0.0 ({url}) -{running} `rustc src[..]lib.rs --crate-name test --crate-type lib \ - -C opt-level=3 \ - --out-dir 
{dir}[..]target[..]release \ - --emit=dep-info,link \ - -L dependency={dir}[..]target[..]release \ - -L dependency={dir}[..]target[..]release[..]deps` -", -running = RUNNING, compiling = COMPILING, -dir = p.root().display(), -url = p.url(), -))); -}); - -test!(verbose_release_build_deps { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [dependencies.foo] - path = "foo" - "#) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", r#" - [package] - - name = "foo" - version = "0.0.0" - authors = [] - - [lib] - name = "foo" - crate_type = ["dylib", "rlib"] - "#) - .file("foo/src/lib.rs", ""); - assert_that(p.cargo_process("build").arg("-v").arg("--release"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) -{running} `rustc foo[..]src[..]lib.rs --crate-name foo \ - --crate-type dylib --crate-type rlib -C prefer-dynamic \ - -C opt-level=3 \ - -C metadata=[..] \ - -C extra-filename=-[..] \ - --out-dir {dir}[..]target[..]release[..]deps \ - --emit=dep-info,link \ - -L dependency={dir}[..]target[..]release[..]deps \ - -L dependency={dir}[..]target[..]release[..]deps` -{compiling} test v0.0.0 ({url}) -{running} `rustc src[..]lib.rs --crate-name test --crate-type lib \ - -C opt-level=3 \ - --out-dir {dir}[..]target[..]release \ - --emit=dep-info,link \ - -L dependency={dir}[..]target[..]release \ - -L dependency={dir}[..]target[..]release[..]deps \ - --extern foo={dir}[..]target[..]release[..]deps[..]\ - {prefix}foo-[..]{suffix} \ - --extern foo={dir}[..]target[..]release[..]deps[..]libfoo-[..].rlib` -", - running = RUNNING, - compiling = COMPILING, - dir = p.root().display(), - url = p.url(), - prefix = env::consts::DLL_PREFIX, - suffix = env::consts::DLL_SUFFIX))); -}); - -test!(explicit_examples { - let mut p = project("world"); - p = p.file("Cargo.toml", r#" - [package] - name = "world" - version = "1.0.0" - authors = [] - - [lib] - name = "world" - path = "src/lib.rs" - - [[example]] - name = "hello" - path = "examples/ex-hello.rs" - - [[example]] - name = "goodbye" - path = "examples/ex-goodbye.rs" - "#) - .file("src/lib.rs", r#" - pub fn get_hello() -> &'static str { "Hello" } - pub fn get_goodbye() -> &'static str { "Goodbye" } - pub fn get_world() -> &'static str { "World" } - "#) - .file("examples/ex-hello.rs", r#" - extern crate world; - fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); } - "#) - .file("examples/ex-goodbye.rs", r#" - extern crate world; - fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); } - "#); - - assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); - assert_that(process(&p.bin("examples/hello")).unwrap(), - execs().with_stdout("Hello, World!\n")); - assert_that(process(&p.bin("examples/goodbye")).unwrap(), - execs().with_stdout("Goodbye, World!\n")); -}); - -test!(implicit_examples { - let mut p = project("world"); - p = p.file("Cargo.toml", r#" - [package] - name = "world" - version = "1.0.0" - authors = [] - "#) - .file("src/lib.rs", r#" - pub fn get_hello() -> &'static str { "Hello" } - pub fn get_goodbye() -> &'static str { "Goodbye" } - pub fn get_world() -> &'static str { "World" } - "#) - .file("examples/hello.rs", r#" - extern crate world; - fn main() { - println!("{}, {}!", world::get_hello(), world::get_world()); - } - "#) - .file("examples/goodbye.rs", r#" - extern crate world; - fn main() { - println!("{}, {}!", world::get_goodbye(), world::get_world()); - } - "#); - - 
assert_that(p.cargo_process("test"), execs().with_status(0)); - assert_that(process(&p.bin("examples/hello")).unwrap(), - execs().with_stdout("Hello, World!\n")); - assert_that(process(&p.bin("examples/goodbye")).unwrap(), - execs().with_stdout("Goodbye, World!\n")); -}); - -test!(standard_build_no_ndebug { - let p = project("world") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - fn main() { - if cfg!(debug_assertions) { - println!("slow") - } else { - println!("fast") - } - } - "#); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("slow\n")); -}); - -test!(release_build_ndebug { - let p = project("world") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - fn main() { - if cfg!(debug_assertions) { - println!("slow") - } else { - println!("fast") - } - } - "#); - - assert_that(p.cargo_process("build").arg("--release"), - execs().with_status(0)); - assert_that(process(&p.release_bin("foo")).unwrap(), - execs().with_stdout("fast\n")); -}); - -test!(inferred_main_bin { - let p = project("world") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(process(&p.bin("foo")).unwrap(), execs().with_status(0)); -}); - -test!(deletion_causes_failure { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", r#" - extern crate bar; - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - let p = p.file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#); - assert_that(p.cargo_process("build"), execs().with_status(101)); -}); - -test!(bad_cargo_toml_in_target_dir { - let p = project("world") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("target/Cargo.toml", "bad-toml"); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(process(&p.bin("foo")).unwrap(), execs().with_status(0)); -}); - -test!(lib_with_standard_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - pub fn foo() {} - ") - .file("src/main.rs", " - extern crate syntax; - fn main() { syntax::foo() } - "); - - assert_that(p.cargo_process("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} syntax v0.0.1 ({dir}) -", - compiling = COMPILING, - dir = p.url()))); -}); - -test!(simple_staticlib { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [lib] - name = "foo" - crate-type = ["staticlib"] - "#) - .file("src/lib.rs", "pub fn foo() {}"); - - // env var is a test for #1381 - assert_that(p.cargo_process("build").env("RUST_LOG", "nekoneko=trace"), - execs().with_status(0)); -}); - -test!(staticlib_rlib_and_bin { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [lib] - name = "foo" - crate-type = ["staticlib", "rlib"] - "#) - 
.file("src/lib.rs", "pub fn foo() {}") - .file("src/main.rs", r#" - extern crate foo; - - fn main() { - foo::foo(); - }"#); - - assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); -}); - -test!(opt_out_of_bin { - let p = project("foo") - .file("Cargo.toml", r#" - bin = [] - - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#) - .file("src/lib.rs", "") - .file("src/main.rs", "bad syntax"); - assert_that(p.cargo_process("build"), execs().with_status(0)); -}); - -test!(single_lib { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [lib] - name = "foo" - path = "src/bar.rs" - "#) - .file("src/bar.rs", ""); - assert_that(p.cargo_process("build"), execs().with_status(0)); -}); - -test!(freshness_ignores_excluded { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - exclude = ["src/b*.rs"] - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }"); - foo.build(); - foo.root().move_into_the_past().unwrap(); - - assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) -", compiling = COMPILING, url = foo.url()))); - - // Smoke test to make sure it doesn't compile again - println!("first pass"); - assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout("")); - - // Modify an ignored file and make sure we don't rebuild - println!("second pass"); - File::create(&foo.root().join("src/bar.rs")).unwrap(); - assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout("")); -}); - -test!(rebuild_preserves_out_dir { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = 'build.rs' - "#) - .file("build.rs", r#" - use std::env; - use std::fs::File; - use std::path::Path; - - fn main() { - let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo"); - if env::var_os("FIRST").is_some() { - File::create(&path).unwrap(); - } else { - File::create(&path).unwrap(); - } - } - "#) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }"); - foo.build(); - foo.root().move_into_the_past().unwrap(); - - assert_that(foo.cargo("build").env("FIRST", "1"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) -", compiling = COMPILING, url = foo.url()))); - - File::create(&foo.root().join("src/bar.rs")).unwrap(); - assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) -", compiling = COMPILING, url = foo.url()))); -}); - -test!(dep_no_libs { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - path = "bar" - "#) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.0" - authors = [] - "#) - .file("bar/src/main.rs", ""); - assert_that(foo.cargo_process("build"), - execs().with_status(0)); -}); - -test!(recompile_space_in_name { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [lib] - name = "foo" - path = "src/my lib.rs" - "#) - .file("src/my lib.rs", ""); - assert_that(foo.cargo_process("build"), execs().with_status(0)); - foo.root().move_into_the_past().unwrap(); - assert_that(foo.cargo("build"), - execs().with_status(0).with_stdout("")); 
-}); - -#[cfg(unix)] -test!(ignore_bad_directories { - use std::os::unix::prelude::*; - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", ""); - foo.build(); - let dir = foo.root().join("tmp"); - fs::create_dir(&dir).unwrap(); - let stat = fs::metadata(&dir).unwrap(); - let mut perms = stat.permissions(); - perms.set_mode(0o644); - fs::set_permissions(&dir, perms.clone()).unwrap(); - assert_that(foo.cargo("build"), - execs().with_status(0)); - perms.set_mode(0o755); - fs::set_permissions(&dir, perms).unwrap(); -}); - -test!(bad_cargo_config { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "") - .file(".cargo/config", r#" - this is not valid toml - "#); - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(101).with_stderr("\ -Couldn't load Cargo configuration - -Caused by: - could not parse TOML configuration in `[..]` - -Caused by: - could not parse input as TOML -[..].cargo[..]config:2:20-2:21 expected `=`, but found `i` - -")); -}); - -test!(cargo_platform_specific_dependency { - let host = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [target.{host}.dependencies] - dep = {{ path = "dep" }} - [target.{host}.build-dependencies] - build = {{ path = "build" }} - [target.{host}.dev-dependencies] - dev = {{ path = "dev" }} - "#, host = host)) - .file("src/main.rs", r#" - extern crate dep; - fn main() { dep::dep() } - "#) - .file("tests/foo.rs", r#" - extern crate dev; - #[test] - fn foo() { dev::dev() } - "#) - .file("build.rs", r#" - extern crate build; - fn main() { build::build(); } - "#) - .file("dep/Cargo.toml", r#" - [project] - name = "dep" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("dep/src/lib.rs", "pub fn dep() {}") - .file("build/Cargo.toml", r#" - [project] - name = "build" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("build/src/lib.rs", "pub fn build() {}") - .file("dev/Cargo.toml", r#" - [project] - name = "dev" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("dev/src/lib.rs", "pub fn dev() {}"); - - assert_that(p.cargo_process("build"), - execs().with_status(0)); - - assert_that(&p.bin("foo"), existing_file()); - assert_that(p.cargo_process("test"), - execs().with_status(0)); -}); - -test!(bad_platform_specific_dependency { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [target.wrong-target.dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("bar/src/lib.rs", r#" - extern crate baz; - - pub fn gimme() -> String { - format!("") - } - "#); - - assert_that(p.cargo_process("build"), - execs().with_status(101)); -}); - -test!(cargo_platform_specific_dependency_wrong_platform { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [target.non-existing-triplet.dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = 
"0.5.0" - authors = ["wycats@example.com"] - "#) - .file("bar/src/lib.rs", r#" - invalid rust file, should not be compiled - "#); - - p.cargo_process("build").exec_with_output().unwrap(); - - assert_that(&p.bin("foo"), existing_file()); - assert_that(process(&p.bin("foo")).unwrap(), - execs()); - - let loc = p.root().join("Cargo.lock"); - let mut lockfile = String::new(); - File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap(); - assert!(lockfile.contains("bar")) -}); - -test!(example_bin_same_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", "fn main() {}") - .file("examples/foo.rs", "fn main() {}"); - - p.cargo_process("test").arg("--no-run").arg("-v") - .exec_with_output() - .unwrap(); - - assert_that(&p.bin("foo"), is_not(existing_file())); - assert_that(&p.bin("examples/foo"), existing_file()); - - p.cargo("test").arg("--no-run").arg("-v") - .exec_with_output() - .unwrap(); - - assert_that(&p.bin("foo"), is_not(existing_file())); - assert_that(&p.bin("examples/foo"), existing_file()); -}); - -test!(compile_then_delete { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("run"), execs().with_status(0)); - assert_that(&p.bin("foo"), existing_file()); - if cfg!(windows) { - // On windows unlinking immediately after running often fails, so sleep - thread::sleep_ms(100); - } - fs::remove_file(&p.bin("foo")).unwrap(); - assert_that(p.cargo("run"), - execs().with_status(0)); -}); - -test!(transitive_dependencies_not_available { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.aaaaa] - path = "a" - "#) - .file("src/main.rs", "extern crate bbbbb; extern crate aaaaa; fn main() {}") - .file("a/Cargo.toml", r#" - [package] - name = "aaaaa" - version = "0.0.1" - authors = [] - - [dependencies.bbbbb] - path = "../b" - "#) - .file("a/src/lib.rs", "extern crate bbbbb;") - .file("b/Cargo.toml", r#" - [package] - name = "bbbbb" - version = "0.0.1" - authors = [] - "#) - .file("b/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101) - .with_stderr("\ -[..] can't find crate for `bbbbb` -[..] extern crate bbbbb; [..] -[..] -error: aborting due to previous error -Could not compile `foo`. - -Caused by: - [..] -")); -}); - -test!(cyclic_deps_rejected { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("a/Cargo.toml", r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies.foo] - path = ".." 
- "#) - .file("a/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101) - .with_stderr("\ -cyclic package dependency: package `foo v0.0.1 ([..])` depends on itself -")); -}); - -test!(predictable_filenames { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate-type = ["staticlib", "dylib", "rlib"] - "#) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - assert_that(&p.root().join("target/debug/libfoo.a"), existing_file()); - assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file()); - let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, - env::consts::DLL_SUFFIX); - assert_that(&p.root().join("target/debug").join(dylib_name), - existing_file()); -}); - -test!(dashes_to_underscores { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo-bar" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("src/main.rs", "extern crate foo_bar; fn main() {}"); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - assert_that(&p.bin("foo-bar"), existing_file()); -}); - -test!(dashes_in_crate_name_bad { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo-bar" - "#) - .file("src/lib.rs", "") - .file("src/main.rs", "extern crate foo_bar; fn main() {}"); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101)); -}); - -test!(rustc_env_var { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", ""); - p.build(); - - assert_that(p.cargo("build") - .env("RUSTC", "rustc-that-does-not-exist").arg("-v"), - execs().with_status(101) - .with_stderr("\ -Could not execute process `rustc-that-does-not-exist -vV` ([..]) - -Caused by: -[..] 
-")); - assert_that(&p.bin("a"), is_not(existing_file())); -}); - -test!(filtering { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("src/bin/b.rs", "fn main() {}") - .file("examples/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}"); - p.build(); - - assert_that(p.cargo("build").arg("--lib"), - execs().with_status(0)); - assert_that(&p.bin("a"), is_not(existing_file())); - - assert_that(p.cargo("build").arg("--bin=a").arg("--example=a"), - execs().with_status(0)); - assert_that(&p.bin("a"), existing_file()); - assert_that(&p.bin("b"), is_not(existing_file())); - assert_that(&p.bin("examples/a"), existing_file()); - assert_that(&p.bin("examples/b"), is_not(existing_file())); -}); - -test!(ignore_dotfile { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/bin/.a.rs", "") - .file("src/bin/a.rs", "fn main() {}"); - p.build(); - - assert_that(p.cargo("build"), - execs().with_status(0)); -}); - -test!(custom_target_dir { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); - - assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"), - execs().with_status(0)); - assert_that(&p.root().join("foo/target/debug").join(&exe_name), - existing_file()); - assert_that(&p.root().join("target/debug").join(&exe_name), - is_not(existing_file())); - - assert_that(p.cargo("build"), - execs().with_status(0)); - assert_that(&p.root().join("foo/target/debug").join(&exe_name), - existing_file()); - assert_that(&p.root().join("target/debug").join(&exe_name), - existing_file()); - - fs::create_dir(p.root().join(".cargo")).unwrap(); - File::create(p.root().join(".cargo/config")).unwrap().write_all(br#" - [build] - target-dir = "bar/target" - "#).unwrap(); - assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"), - execs().with_status(0)); - assert_that(&p.root().join("bar/target/debug").join(&exe_name), - existing_file()); - assert_that(&p.root().join("foo/target/debug").join(&exe_name), - existing_file()); - assert_that(&p.root().join("target/debug").join(&exe_name), - existing_file()); -}); - -test!(rustc_no_trans { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"), - execs().with_status(0)); -}); diff --git a/tests/test_cargo_compile_custom_build.rs b/tests/test_cargo_compile_custom_build.rs deleted file mode 100644 index f172e34be49..00000000000 --- a/tests/test_cargo_compile_custom_build.rs +++ /dev/null @@ -1,1368 +0,0 @@ -use std::env; -use std::fs::{self, File}; -use std::io::prelude::*; - -use support::{project, execs}; -use support::{COMPILING, RUNNING, DOCTEST, FRESH}; -use support::paths::CargoPathExt; -use hamcrest::{assert_that}; - -fn setup() { -} - -test!(custom_build_script_failed { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("build.rs", r#" - fn main() { - std::process::exit(101); - } - "#); - 
assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101) - .with_stdout(&format!("\ -{compiling} foo v0.5.0 ({url}) -{running} `rustc build.rs --crate-name build_script_build --crate-type bin [..]` -{running} `[..]build-script-build[..]` -", -url = p.url(), compiling = COMPILING, running = RUNNING)) - .with_stderr(&format!("\ -failed to run custom build command for `foo v0.5.0 ({})` -Process didn't exit successfully: `[..]build[..]build-script-build[..]` \ - (exit code: 101)", -p.url()))); -}); - -test!(custom_build_env_vars { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [features] - bar_feat = ["bar/foo"] - - [dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [features] - foo = [] - "#) - .file("bar/src/lib.rs", r#" - pub fn hello() {} - "#); - - let file_content = format!(r#" - use std::env; - use std::io::prelude::*; - use std::path::Path; - use std::fs; - - fn main() {{ - let _target = env::var("TARGET").unwrap(); - let _ncpus = env::var("NUM_JOBS").unwrap(); - let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); - - let opt = env::var("OPT_LEVEL").unwrap(); - assert_eq!(opt, "0"); - - let opt = env::var("PROFILE").unwrap(); - assert_eq!(opt, "debug"); - - let debug = env::var("DEBUG").unwrap(); - assert_eq!(debug, "true"); - - let out = env::var("OUT_DIR").unwrap(); - assert!(out.starts_with(r"{0}")); - assert!(fs::metadata(&out).map(|m| m.is_dir()).unwrap_or(false)); - - let _host = env::var("HOST").unwrap(); - - let _feat = env::var("CARGO_FEATURE_FOO").unwrap(); - }} - "#, - p.root().join("target").join("debug").join("build").display()); - - let p = p.file("bar/build.rs", &file_content); - - - assert_that(p.cargo_process("build").arg("--features").arg("bar_feat"), - execs().with_status(0)); -}); - -test!(custom_build_script_wrong_rustc_flags { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("build.rs", r#" - fn main() { - println!("cargo:rustc-flags=-aaa -bbb"); - } - "#); - - assert_that(p.cargo_process("build"), - execs().with_status(101) - .with_stderr(&format!("\ -Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ({})`: \ -`-aaa -bbb`", -p.url()))); -}); - -/* -test!(custom_build_script_rustc_flags { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.foo] - path = "foo" - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("foo/Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#) - .file("foo/src/lib.rs", r#" - "#) - .file("foo/build.rs", r#" - fn main() { - println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); - } - "#); - - // TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works) - assert_that(p.cargo_process("build").arg("--verbose"), - execs().with_status(101) - .with_stdout(&format!("\ -{compiling} bar v0.5.0 ({url}) -{running} `rustc {dir}{sep}src{sep}lib.rs --crate-name test --crate-type lib -g \ - -C metadata=[..] \ - -C extra-filename=-[..] 
\ - --out-dir {dir}{sep}target \ - --emit=dep-info,link \ - -L {dir}{sep}target \ - -L {dir}{sep}target{sep}deps` -", -running = RUNNING, compiling = COMPILING, sep = path::SEP, -dir = p.root().display(), -url = p.url(), -))); -}); -*/ - -test!(links_no_build_cmd { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - "#) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101) - .with_stderr("\ -package `foo v0.5.0 (file://[..])` specifies that it links to `a` but does \ -not have a custom build script -")); -}); - -test!(links_duplicates { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("build.rs", "") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - "#) - .file("a/src/lib.rs", "") - .file("a/build.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101) - .with_stderr("\ -native library `a` is being linked to by more than one package, and can only be \ -linked to by one package - - [..] v0.5.0 (file://[..]) - [..] v0.5.0 (file://[..]) -")); -}); - -test!(overrides_and_links { - let target = ::rustc_host(); - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - use std::env; - fn main() { - assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); - assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); - } - "#) - .file(".cargo/config", &format!(r#" - [target.{}.foo] - rustc-flags = "-L foo -L bar" - foo = "bar" - bar = "baz" - "#, target)) - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#) - .file("a/src/lib.rs", "") - .file("a/build.rs", "not valid rust code"); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -[..] -[..] -[..] -[..] -[..] -{running} `rustc [..] --crate-name foo [..] 
-L foo -L bar[..]` -", running = RUNNING))); -}); - -test!(unused_overrides { - let target = ::rustc_host(); - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .file(".cargo/config", &format!(r#" - [target.{}.foo] - rustc-flags = "-L foo -L bar" - foo = "bar" - bar = "baz" - "#, target)); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); -}); - -test!(links_passes_env_vars { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - use std::env; - fn main() { - assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); - assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); - } - "#) - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#) - .file("a/src/lib.rs", "") - .file("a/build.rs", r#" - fn main() { - println!("cargo:foo=bar"); - println!("cargo:bar=baz"); - } - "#); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} [..] v0.5.0 (file://[..]) -{running} `rustc [..]build.rs [..]` -{compiling} [..] v0.5.0 (file://[..]) -{running} `rustc [..]build.rs [..]` -{running} `[..]` -{running} `[..]` -{running} `[..]` -{running} `rustc [..] --crate-name foo [..]` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(only_rerun_build_script { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - fn main() {} - "#); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - p.root().move_into_the_past().unwrap(); - - File::create(&p.root().join("some-new-file")).unwrap(); - p.root().move_into_the_past().unwrap(); - - assert_that(p.cargo("build").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.5.0 (file://[..]) -{running} `[..]build-script-build[..]` -{running} `rustc [..] 
--crate-name foo [..]` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(rebuild_continues_to_pass_env_vars { - let a = project("a") - .file("Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - fn main() { - println!("cargo:foo=bar"); - println!("cargo:bar=baz"); - } - "#); - a.build(); - a.root().move_into_the_past().unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a] - path = '{}' - "#, a.root().display())) - .file("src/lib.rs", "") - .file("build.rs", r#" - use std::env; - fn main() { - assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); - assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); - } - "#); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - p.root().move_into_the_past().unwrap(); - - File::create(&p.root().join("some-new-file")).unwrap(); - p.root().move_into_the_past().unwrap(); - - assert_that(p.cargo("build").arg("-v"), - execs().with_status(0)); -}); - -test!(testing_and_such { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - fn main() {} - "#); - - println!("build"); - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - p.root().move_into_the_past().unwrap(); - - File::create(&p.root().join("src/lib.rs")).unwrap(); - p.root().move_into_the_past().unwrap(); - - println!("test"); - assert_that(p.cargo("test").arg("-vj1"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.5.0 (file://[..]) -{running} `[..]build-script-build[..]` -{running} `rustc [..] --crate-name foo [..]` -{running} `rustc [..] --crate-name foo [..]` -{running} `[..]foo-[..][..]` - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo -{running} `rustdoc --test [..]` - -running 0 tests - -test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, doctest = DOCTEST))); - - println!("doc"); - assert_that(p.cargo("doc").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.5.0 (file://[..]) -{running} `rustdoc [..]` -", compiling = COMPILING, running = RUNNING))); - - File::create(&p.root().join("src/main.rs")).unwrap() - .write_all(b"fn main() {}").unwrap(); - println!("run"); - assert_that(p.cargo("run"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.5.0 (file://[..]) -{running} `target[..]foo[..]` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(propagation_of_l_flags { - let target = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "bar" - build = "build.rs" - - [dependencies.b] - path = "../b" - "#) - .file("a/src/lib.rs", "") - .file("a/build.rs", r#" - fn main() { - println!("cargo:rustc-flags=-L bar"); - } - "#) - .file("b/Cargo.toml", r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#) - .file("b/src/lib.rs", "") - .file("b/build.rs", "bad file") - .file(".cargo/config", &format!(r#" - [target.{}.foo] - rustc-flags = "-L foo" - "#, target)); - - assert_that(p.cargo_process("build").arg("-v").arg("-j1"), - execs().with_status(0) - .with_stdout(&format!("\ -[..] -[..] -[..] -[..] -{running} `[..]a-[..]build-script-build[..]` -{running} `rustc [..] --crate-name a [..]-L bar[..]-L foo[..]` -{compiling} foo v0.5.0 (file://[..]) -{running} `rustc [..] --crate-name foo [..] -L bar -L foo` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(propagation_of_l_flags_new { - let target = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "bar" - build = "build.rs" - - [dependencies.b] - path = "../b" - "#) - .file("a/src/lib.rs", "") - .file("a/build.rs", r#" - fn main() { - println!("cargo:rustc-link-search=bar"); - } - "#) - .file("b/Cargo.toml", r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#) - .file("b/src/lib.rs", "") - .file("b/build.rs", "bad file") - .file(".cargo/config", &format!(r#" - [target.{}.foo] - rustc-link-search = ["foo"] - "#, target)); - - assert_that(p.cargo_process("build").arg("-v").arg("-j1"), - execs().with_status(0) - .with_stdout(&format!("\ -[..] -[..] -[..] -[..] -{running} `[..]a-[..]build-script-build[..]` -{running} `rustc [..] --crate-name a [..]-L bar[..]-L foo[..]` -{compiling} foo v0.5.0 (file://[..]) -{running} `rustc [..] --crate-name foo [..] 
-L bar -L foo` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(build_deps_simple { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - [build-dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("build.rs", " - extern crate a; - fn main() {} - ") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - "#) - .file("a/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} a v0.5.0 (file://[..]) -{running} `rustc [..] --crate-name a [..]` -{compiling} foo v0.5.0 (file://[..]) -{running} `rustc build.rs [..] --extern a=[..]` -{running} `[..]foo-[..]build-script-build[..]` -{running} `rustc [..] --crate-name foo [..]` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(build_deps_not_for_normal { - let target = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - [build-dependencies.aaaaa] - path = "a" - "#) - .file("src/lib.rs", "extern crate aaaaa;") - .file("build.rs", " - extern crate aaaaa; - fn main() {} - ") - .file("a/Cargo.toml", r#" - [project] - name = "aaaaa" - version = "0.5.0" - authors = [] - "#) - .file("a/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), - execs().with_status(101) - .with_stderr("\ -[..]lib.rs[..] error: can't find crate for `aaaaa` -[..]lib.rs[..] extern crate aaaaa; -[..] ^~~~~~~~~~~~~~~~~~~ -error: aborting due to previous error -Could not compile `foo`. - -Caused by: - Process didn't exit successfully: [..] -")); -}); - -test!(build_cmd_with_a_build_cmd { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [build-dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("build.rs", " - extern crate a; - fn main() {} - ") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - - [build-dependencies.b] - path = "../b" - "#) - .file("a/src/lib.rs", "") - .file("a/build.rs", "extern crate b; fn main() {}") - .file("b/Cargo.toml", r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - "#) - .file("b/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} b v0.5.0 (file://[..]) -{running} `rustc [..] --crate-name b [..]` -{compiling} a v0.5.0 (file://[..]) -{running} `rustc a[..]build.rs [..] --extern b=[..]` -{running} `[..]a-[..]build-script-build[..]` -{running} `rustc [..]lib.rs --crate-name a --crate-type lib -g \ - -C metadata=[..] -C extra-filename=-[..] \ - --out-dir [..]target[..]deps --emit=dep-info,link \ - -L [..]target[..]deps -L [..]target[..]deps` -{compiling} foo v0.5.0 (file://[..]) -{running} `rustc build.rs --crate-name build_script_build --crate-type bin \ - -g \ - --out-dir [..]build[..]foo-[..] 
--emit=dep-info,link \ - -L [..]target[..]debug -L [..]target[..]deps \ - --extern a=[..]liba-[..].rlib` -{running} `[..]foo-[..]build-script-build[..]` -{running} `rustc [..]lib.rs --crate-name foo --crate-type lib -g \ - --out-dir [..]target[..]debug --emit=dep-info,link \ - -L [..]target[..]debug -L [..]target[..]deps` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(out_dir_is_preserved { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - use std::env; - use std::fs::File; - use std::path::Path; - fn main() { - let out = env::var("OUT_DIR").unwrap(); - File::create(Path::new(&out).join("foo")).unwrap(); - } - "#); - - // Make the file - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - p.root().move_into_the_past().unwrap(); - - // Change to asserting that it's there - File::create(&p.root().join("build.rs")).unwrap().write_all(br#" - use std::env; - use std::old_io::File; - fn main() { - let out = env::var("OUT_DIR").unwrap(); - File::open(&Path::new(&out).join("foo")).unwrap(); - } - "#).unwrap(); - p.root().move_into_the_past().unwrap(); - assert_that(p.cargo("build").arg("-v"), - execs().with_status(0)); - - // Run a fresh build where file should be preserved - assert_that(p.cargo("build").arg("-v"), - execs().with_status(0)); - - // One last time to make sure it's still there. - File::create(&p.root().join("foo")).unwrap(); - assert_that(p.cargo("build").arg("-v"), - execs().with_status(0)); -}); - -test!(output_separate_lines { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - fn main() { - println!("cargo:rustc-flags=-L foo"); - println!("cargo:rustc-flags=-l static=foo"); - } - "#); - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101) - .with_stdout(&format!("\ -{compiling} foo v0.5.0 (file://[..]) -{running} `rustc build.rs [..]` -{running} `[..]foo-[..]build-script-build[..]` -{running} `rustc [..] --crate-name foo [..] -L foo -l static=foo` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(output_separate_lines_new { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - fn main() { - println!("cargo:rustc-link-search=foo"); - println!("cargo:rustc-link-lib=static=foo"); - } - "#); - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101) - .with_stdout(&format!("\ -{compiling} foo v0.5.0 (file://[..]) -{running} `rustc build.rs [..]` -{running} `[..]foo-[..]build-script-build[..]` -{running} `rustc [..] --crate-name foo [..] 
-L foo -l static=foo`
-", compiling = COMPILING, running = RUNNING)));
-});
-
-#[cfg(not(windows))] // FIXME(#867)
-test!(code_generation {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.5.0"
- authors = []
- build = "build.rs"
- "#)
- .file("src/main.rs", r#"
- include!(concat!(env!("OUT_DIR"), "/hello.rs"));
-
- fn main() {
- println!("{}", message());
- }
- "#)
- .file("build.rs", r#"
- use std::env;
- use std::fs::File;
- use std::io::prelude::*;
- use std::path::PathBuf;
-
- fn main() {
- let dst = PathBuf::from(env::var("OUT_DIR").unwrap());
- let mut f = File::create(&dst.join("hello.rs")).unwrap();
- f.write_all(b"
- pub fn message() -> &'static str {
- \"Hello, World!\"
- }
- ").unwrap();
- }
- "#);
- assert_that(p.cargo_process("run"),
- execs().with_status(0)
- .with_stdout(&format!("\
-{compiling} foo v0.5.0 (file://[..])
-{running} `target[..]foo`
-Hello, World!
-", compiling = COMPILING, running = RUNNING)));
-
- assert_that(p.cargo_process("test"),
- execs().with_status(0));
-});
-
-test!(release_with_build_script {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.5.0"
- authors = []
- build = "build.rs"
- "#)
- .file("src/lib.rs", "")
- .file("build.rs", r#"
- fn main() {}
- "#);
-
- assert_that(p.cargo_process("build").arg("-v").arg("--release"),
- execs().with_status(0));
-});
-
-test!(build_script_only {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.0.0"
- authors = []
- build = "build.rs"
- "#)
- .file("build.rs", r#"fn main() {}"#);
- assert_that(p.cargo_process("build").arg("-v"),
- execs().with_status(101)
- .with_stderr("\
-failed to parse manifest at `[..]`
-
-Caused by:
- no targets specified in the manifest
- either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present"));
-});
-
-test!(shared_dep_with_a_build_script {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.5.0"
- authors = []
- build = "build.rs"
-
- [dependencies.a]
- path = "a"
-
- [build-dependencies.b]
- path = "b"
- "#)
- .file("src/lib.rs", "")
- .file("build.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
- [package]
- name = "a"
- version = "0.5.0"
- authors = []
- build = "build.rs"
- "#)
- .file("a/build.rs", "fn main() {}")
- .file("a/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
- [package]
- name = "b"
- version = "0.5.0"
- authors = []
-
- [dependencies.a]
- path = "../a"
- "#)
- .file("b/src/lib.rs", "");
- assert_that(p.cargo_process("build"),
- execs().with_status(0));
-});
-
-test!(transitive_dep_host {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.5.0"
- authors = []
- build = "build.rs"
-
- [build-dependencies.b]
- path = "b"
- "#)
- .file("src/lib.rs", "")
- .file("build.rs", "fn main() {}")
- .file("a/Cargo.toml", r#"
- [package]
- name = "a"
- version = "0.5.0"
- authors = []
- links = "foo"
- build = "build.rs"
- "#)
- .file("a/build.rs", "fn main() {}")
- .file("a/src/lib.rs", "")
- .file("b/Cargo.toml", r#"
- [package]
- name = "b"
- version = "0.5.0"
- authors = []
-
- [lib]
- name = "b"
- plugin = true
-
- [dependencies.a]
- path = "../a"
- "#)
- .file("b/src/lib.rs", "");
- assert_that(p.cargo_process("build"),
- execs().with_status(0));
-});
-
-test!(test_a_lib_with_a_build_command {
- let p = project("foo")
- .file("Cargo.toml", r#"
- [project]
- name = "foo"
- version = "0.5.0"
- authors = []
- build = "build.rs"
- "#)
- -
.file("src/lib.rs", r#" - include!(concat!(env!("OUT_DIR"), "/foo.rs")); - - /// ``` - /// foo::bar(); - /// ``` - pub fn bar() { - assert_eq!(foo(), 1); - } - "#) - .file("build.rs", r#" - use std::env; - use std::io::prelude::*; - use std::fs::File; - use std::path::PathBuf; - - fn main() { - let out = PathBuf::from(env::var("OUT_DIR").unwrap()); - File::create(out.join("foo.rs")).unwrap().write_all(b" - fn foo() -> i32 { 1 } - ").unwrap(); - } - "#); - assert_that(p.cargo_process("test"), - execs().with_status(0)); -}); - -test!(test_dev_dep_build_script { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dev-dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("a/build.rs", "fn main() {}") - .file("a/src/lib.rs", ""); - - assert_that(p.cargo_process("test"), execs().with_status(0)); -}); - -test!(build_script_with_dynamic_native_dependency { - let build = project("builder") - .file("Cargo.toml", r#" - [package] - name = "builder" - version = "0.0.1" - authors = [] - - [lib] - name = "builder" - crate-type = ["dylib"] - plugin = true - "#) - .file("src/lib.rs", r#" - #[no_mangle] - pub extern fn foo() {} - "#); - assert_that(build.cargo_process("build"), - execs().with_status(0)); - let src = build.root().join("target/debug"); - let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| { - let lib = lib.file_name().unwrap().to_str().unwrap(); - lib.starts_with(env::consts::DLL_PREFIX) && - lib.ends_with(env::consts::DLL_SUFFIX) - }).unwrap(); - let libname = lib.file_name().unwrap().to_str().unwrap(); - let libname = &libname[env::consts::DLL_PREFIX.len().. 
- libname.len() - env::consts::DLL_SUFFIX.len()]; - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [build-dependencies.bar] - path = "bar" - "#) - .file("build.rs", r#" - extern crate bar; - fn main() { bar::bar() } - "#) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = "build.rs" - "#) - .file("bar/build.rs", r#" - use std::env; - use std::path::PathBuf; - - fn main() { - let src = PathBuf::from(env::var("SRC").unwrap()); - println!("cargo:rustc-flags=-L {}", src.parent().unwrap() - .display()); - } - "#) - .file("bar/src/lib.rs", &format!(r#" - pub fn bar() {{ - #[link(name = "{}")] - extern {{ fn foo(); }} - unsafe {{ foo() }} - }} - "#, libname)); - - assert_that(foo.cargo_process("build").env("SRC", &lib), - execs().with_status(0)); -}); - -test!(profile_and_opt_level_set_correctly { - let build = project("builder") - .file("Cargo.toml", r#" - [package] - name = "builder" - version = "0.0.1" - authors = [] - build = "build.rs" - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - use std::env; - - fn main() { - assert_eq!(env::var("OPT_LEVEL").unwrap(), "3"); - assert_eq!(env::var("PROFILE").unwrap(), "release"); - assert_eq!(env::var("DEBUG").unwrap(), "false"); - } - "#); - assert_that(build.cargo_process("bench"), - execs().with_status(0)); -}); - -test!(build_script_with_lto { - let build = project("builder") - .file("Cargo.toml", r#" - [package] - name = "builder" - version = "0.0.1" - authors = [] - build = "build.rs" - - [profile.dev] - lto = true - "#) - .file("src/lib.rs", "") - .file("build.rs", r#" - fn main() { - } - "#); - assert_that(build.cargo_process("build"), - execs().with_status(0)); -}); - -test!(test_duplicate_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - build = "build.rs" - - [dependencies.bar] - path = "bar" - - [build-dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", r#" - extern crate bar; - fn main() { bar::do_nothing() } - "#) - .file("build.rs", r#" - extern crate bar; - fn main() { bar::do_nothing() } - "#) - .file("bar/Cargo.toml", r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - "#) - .file("bar/src/lib.rs", "pub fn do_nothing() {}"); - - assert_that(p.cargo_process("build"), execs().with_status(0)); -}); - -test!(cfg_feedback { - let build = project("builder") - .file("Cargo.toml", r#" - [package] - name = "builder" - version = "0.0.1" - authors = [] - build = "build.rs" - "#) - .file("src/main.rs", " - #[cfg(foo)] - fn main() {} - ") - .file("build.rs", r#" - fn main() { - println!("cargo:rustc-cfg=foo"); - } - "#); - assert_that(build.cargo_process("build"), - execs().with_status(0)); -}); - -test!(cfg_override { - let target = ::rustc_host(); - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - "#) - .file("src/main.rs", " - #[cfg(foo)] - fn main() {} - ") - .file("build.rs", "") - .file(".cargo/config", &format!(r#" - [target.{}.a] - rustc-cfg = ["foo"] - "#, target)); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); -}); - -test!(flags_go_into_tests { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - b = { path = "b" } - "#) - .file("src/lib.rs", "") - 
.file("tests/foo.rs", "") - .file("b/Cargo.toml", r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies] - a = { path = "../a" } - "#) - .file("b/src/lib.rs", "") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("a/src/lib.rs", "") - .file("a/build.rs", r#" - fn main() { - println!("cargo:rustc-link-search=test"); - } - "#); - - assert_that(p.cargo_process("test").arg("-v").arg("--test=foo"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} a v0.5.0 ([..] -{running} `rustc a[..]build.rs [..]` -{running} `[..]build-script-build[..]` -{running} `rustc a[..]src[..]lib.rs [..] -L test[..]` -{compiling} b v0.5.0 ([..] -{running} `rustc b[..]src[..]lib.rs [..] -L test[..]` -{compiling} foo v0.5.0 ([..] -{running} `rustc src[..]lib.rs [..] -L test[..]` -{running} `rustc tests[..]foo.rs [..] -L test[..]` -{running} `[..]foo-[..]` - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING))); - - assert_that(p.cargo("test").arg("-v").arg("-pb").arg("--lib"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} b v0.5.0 ([..] -{running} `rustc b[..]src[..]lib.rs [..] -L test[..]` -{fresh} a v0.5.0 ([..] -{running} `[..]b-[..]` - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, fresh = FRESH))); -}); diff --git a/tests/test_cargo_compile_git_deps.rs b/tests/test_cargo_compile_git_deps.rs deleted file mode 100644 index 3cf30a2f27e..00000000000 --- a/tests/test_cargo_compile_git_deps.rs +++ /dev/null @@ -1,1740 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use std::path::Path; -use std::thread; -use git2; - -use support::{git, project, execs, main_file, path2url}; -use support::{COMPILING, UPDATING, RUNNING}; -use support::paths::{self, CargoPathExt}; -use hamcrest::{assert_that,existing_file}; -use cargo; -use cargo::util::process; - -fn setup() { -} - -test!(cargo_compile_simple_git_dep { - let project = project("foo"); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", r#" - [project] - - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - - name = "dep1" - "#) - .file("src/dep1.rs", r#" - pub fn hello() -> &'static str { - "hello world" - } - "#) - }).unwrap(); - - let project = project - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - - [[bin]] - - name = "foo" - "#, git_project.url())) - .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); - - let root = project.root(); - let git_root = git_project.root(); - - assert_that(project.cargo_process("build"), - execs() - .with_stdout(&format!("{} git repository `{}`\n\ - {} dep1 v0.5.0 ({}#[..])\n\ - {} foo v0.5.0 ({})\n", - UPDATING, path2url(git_root.clone()), - COMPILING, path2url(git_root), - COMPILING, path2url(root))) - .with_stderr("")); - - assert_that(&project.bin("foo"), existing_file()); - - assert_that( - cargo::util::process(&project.bin("foo")).unwrap(), - execs().with_stdout("hello world\n")); -}); - -test!(cargo_compile_git_dep_branch { - let project = project("foo"); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", r#" - [project] - - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - - name = "dep1" - "#) - 
.file("src/dep1.rs", r#" - pub fn hello() -> &'static str { - "hello world" - } - "#) - }).unwrap(); - - // Make a new branch based on the current HEAD commit - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let head = repo.head().unwrap().target().unwrap(); - let head = repo.find_commit(head).unwrap(); - repo.branch("branchy", &head, true).unwrap(); - - let project = project - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - branch = "branchy" - - [[bin]] - - name = "foo" - "#, git_project.url())) - .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); - - let root = project.root(); - let git_root = git_project.root(); - - assert_that(project.cargo_process("build"), - execs() - .with_stdout(&format!("{} git repository `{}`\n\ - {} dep1 v0.5.0 ({}?branch=branchy#[..])\n\ - {} foo v0.5.0 ({})\n", - UPDATING, path2url(git_root.clone()), - COMPILING, path2url(git_root), - COMPILING, path2url(root))) - .with_stderr("")); - - assert_that(&project.bin("foo"), existing_file()); - - assert_that( - cargo::util::process(&project.bin("foo")).unwrap(), - execs().with_stdout("hello world\n")); -}); - -test!(cargo_compile_git_dep_tag { - let project = project("foo"); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", r#" - [project] - - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - - name = "dep1" - "#) - .file("src/dep1.rs", r#" - pub fn hello() -> &'static str { - "hello world" - } - "#) - }).unwrap(); - - // Make a tag corresponding to the current HEAD - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let head = repo.head().unwrap().target().unwrap(); - repo.tag("v0.1.0", - &repo.find_object(head, None).unwrap(), - &repo.signature().unwrap(), - "make a new tag", - false).unwrap(); - - let project = project - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - tag = "v0.1.0" - - [[bin]] - - name = "foo" - "#, git_project.url())) - .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); - - let root = project.root(); - let git_root = git_project.root(); - - assert_that(project.cargo_process("build"), - execs() - .with_stdout(&format!("{} git repository `{}`\n\ - {} dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\ - {} foo v0.5.0 ({})\n", - UPDATING, path2url(git_root.clone()), - COMPILING, path2url(git_root), - COMPILING, path2url(root)))); - - assert_that(&project.bin("foo"), existing_file()); - - assert_that(cargo::util::process(&project.bin("foo")).unwrap(), - execs().with_stdout("hello world\n")); - - assert_that(project.cargo("build"), - execs().with_status(0)); -}); - -test!(cargo_compile_with_nested_paths { - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", r#" - [project] - - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [dependencies.dep2] - - version = "0.5.0" - path = "vendor/dep2" - - [lib] - - name = "dep1" - "#) - .file("src/dep1.rs", r#" - extern crate dep2; - - pub fn hello() -> &'static str { - dep2::hello() - } - "#) - .file("vendor/dep2/Cargo.toml", r#" - [project] - - name = "dep2" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - - name = "dep2" - "#) - .file("vendor/dep2/src/dep2.rs", r#" - pub fn hello() -> &'static str { - "hello world" - } - "#) - }).unwrap(); - - let p 
= project("parent") - .file("Cargo.toml", &format!(r#" - [project] - - name = "parent" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - version = "0.5.0" - git = '{}' - - [[bin]] - - name = "parent" - "#, git_project.url())) - .file("src/parent.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"])); - - p.cargo_process("build") - .exec_with_output() - .unwrap(); - - assert_that(&p.bin("parent"), existing_file()); - - assert_that(cargo::util::process(&p.bin("parent")).unwrap(), - execs().with_stdout("hello world\n")); -}); - -test!(cargo_compile_with_meta_package { - let git_project = git::new("meta-dep", |project| { - project - .file("dep1/Cargo.toml", r#" - [project] - - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - - name = "dep1" - "#) - .file("dep1/src/dep1.rs", r#" - pub fn hello() -> &'static str { - "this is dep1" - } - "#) - .file("dep2/Cargo.toml", r#" - [project] - - name = "dep2" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - - name = "dep2" - "#) - .file("dep2/src/dep2.rs", r#" - pub fn hello() -> &'static str { - "this is dep2" - } - "#) - }).unwrap(); - - let p = project("parent") - .file("Cargo.toml", &format!(r#" - [project] - - name = "parent" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - version = "0.5.0" - git = '{}' - - [dependencies.dep2] - - version = "0.5.0" - git = '{}' - - [[bin]] - - name = "parent" - "#, git_project.url(), git_project.url())) - .file("src/parent.rs", - &main_file(r#""{} {}", dep1::hello(), dep2::hello()"#, &["dep1", "dep2"])); - - p.cargo_process("build") - .exec_with_output() - .unwrap(); - - assert_that(&p.bin("parent"), existing_file()); - - assert_that(cargo::util::process(&p.bin("parent")).unwrap(), - execs().with_stdout("this is dep1 this is dep2\n")); -}); - -test!(cargo_compile_with_short_ssh_git { - let url = "git@github.com:a/dep"; - - let project = project("project") - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep] - - git = "{}" - - [[bin]] - - name = "foo" - "#, url)) - .file("src/foo.rs", &main_file(r#""{}", dep1::hello()"#, &["dep1"])); - - assert_that(project.cargo_process("build"), - execs() - .with_stdout("") - .with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - invalid url `{}`: relative URL without a base -", url))); -}); - -test!(two_revs_same_deps { - let bar = git::new("meta-dep", |project| { - project.file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }).unwrap(); - - let repo = git2::Repository::open(&bar.root()).unwrap(); - let rev1 = repo.revparse_single("HEAD").unwrap().id(); - - // Commit the changes and make sure we trigger a recompile - File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" - pub fn bar() -> i32 { 2 } - "#).unwrap(); - git::add(&repo); - let rev2 = git::commit(&repo); - - let foo = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = '{}' - rev = "{}" - - [dependencies.baz] - path = "../baz" - "#, bar.url(), rev1)) - .file("src/main.rs", r#" - extern crate bar; - extern crate baz; - - fn main() { - assert_eq!(bar::bar(), 1); - assert_eq!(baz::baz(), 2); - } - "#); - - let baz = project("baz") - .file("Cargo.toml", &format!(r#" - [package] - name = "baz" - 
version = "0.0.0" - authors = [] - - [dependencies.bar] - git = '{}' - rev = "{}" - "#, bar.url(), rev2)) - .file("src/lib.rs", r#" - extern crate bar; - pub fn baz() -> i32 { bar::bar() } - "#); - - baz.build(); - - assert_that(foo.cargo_process("build").arg("-v"), - execs().with_status(0)); - assert_that(&foo.bin("foo"), existing_file()); - assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); -}); - -test!(recompilation { - let git_project = git::new("bar", |project| { - project - .file("Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - name = "bar" - "#) - .file("src/bar.rs", r#" - pub fn bar() {} - "#) - }).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - git = '{}' - - [[bin]] - - name = "foo" - "#, git_project.url())) - .file("src/foo.rs", - &main_file(r#""{:?}", bar::bar()"#, &["bar"])); - - // First time around we should compile both foo and bar - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} git repository `{}`\n\ - {} bar v0.5.0 ({}#[..])\n\ - {} foo v0.5.0 ({})\n", - UPDATING, git_project.url(), - COMPILING, git_project.url(), - COMPILING, p.url()))); - - // Don't recompile the second time - assert_that(p.cargo("build"), - execs().with_stdout("")); - - // Modify a file manually, shouldn't trigger a recompile - File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" - pub fn bar() { println!("hello!"); } - "#).unwrap(); - - assert_that(p.cargo("build"), - execs().with_stdout("")); - - assert_that(p.cargo("update"), - execs().with_stdout(&format!("{} git repository `{}`", - UPDATING, - git_project.url()))); - - assert_that(p.cargo("build"), - execs().with_stdout("")); - - // Commit the changes and make sure we don't trigger a recompile because the - // lockfile says not to change - let repo = git2::Repository::open(&git_project.root()).unwrap(); - git::add(&repo); - git::commit(&repo); - - println!("compile after commit"); - assert_that(p.cargo("build"), - execs().with_stdout("")); - p.root().move_into_the_past().unwrap(); - - // Update the dependency and carry on! 
- assert_that(p.cargo("update"), - execs().with_stdout(&format!("{} git repository `{}`\n\ - {} bar v0.5.0 ([..]) -> #[..]\n\ - ", - UPDATING, - git_project.url(), - UPDATING))); - println!("going for the last compile"); - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({}#[..])\n\ - {} foo v0.5.0 ({})\n", - COMPILING, git_project.url(), - COMPILING, p.url()))); - - // Make sure clean only cleans one dep - assert_that(p.cargo("clean") - .arg("-p").arg("foo"), - execs().with_stdout("")); - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} foo v0.5.0 ({})\n", - COMPILING, p.url()))); -}); - -test!(update_with_shared_deps { - let git_project = git::new("bar", |project| { - project - .file("Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [lib] - name = "bar" - "#) - .file("src/bar.rs", r#" - pub fn bar() {} - "#) - }).unwrap(); - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - path = "dep1" - [dependencies.dep2] - path = "dep2" - "#) - .file("src/main.rs", r#" - extern crate dep1; - extern crate dep2; - fn main() {} - "#) - .file("dep1/Cargo.toml", &format!(r#" - [package] - name = "dep1" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - version = "0.5.0" - git = '{}' - "#, git_project.url())) - .file("dep1/src/lib.rs", "") - .file("dep2/Cargo.toml", &format!(r#" - [package] - name = "dep2" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - version = "0.5.0" - git = '{}' - "#, git_project.url())) - .file("dep2/src/lib.rs", ""); - - // First time around we should compile both foo and bar - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("\ -{updating} git repository `{git}` -{compiling} bar v0.5.0 ({git}#[..]) -{compiling} [..] v0.5.0 ({dir}) -{compiling} [..] v0.5.0 ({dir}) -{compiling} foo v0.5.0 ({dir})\n", - updating = UPDATING, git = git_project.url(), - compiling = COMPILING, dir = p.url()))); - - // Modify a file manually, and commit it - File::create(&git_project.root().join("src/bar.rs")).unwrap().write_all(br#" - pub fn bar() { println!("hello!"); } - "#).unwrap(); - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let old_head = repo.head().unwrap().target().unwrap(); - git::add(&repo); - git::commit(&repo); - - thread::sleep_ms(1000); - - // By default, not transitive updates - println!("dep1 update"); - assert_that(p.cargo("update") - .arg("-p").arg("dep1"), - execs().with_stdout("")); - - // Specifying a precise rev to the old rev shouldn't actually update - // anything because we already have the rev in the db. - println!("bar precise update"); - assert_that(p.cargo("update") - .arg("-p").arg("bar") - .arg("--precise").arg(&old_head.to_string()), - execs().with_stdout("")); - - // Updating aggressively should, however, update the repo. - println!("dep1 aggressive update"); - assert_that(p.cargo("update") - .arg("-p").arg("dep1") - .arg("--aggressive"), - execs().with_stdout(&format!("{} git repository `{}`\n\ - {} bar v0.5.0 ([..]) -> #[..]\n\ - ", - UPDATING, - git_project.url(), - UPDATING))); - - // Make sure we still only compile one version of the git repo - println!("build"); - assert_that(p.cargo("build"), - execs().with_stdout(&format!("\ -{compiling} bar v0.5.0 ({git}#[..]) -{compiling} [..] v0.5.0 ({dir}) -{compiling} [..] 
v0.5.0 ({dir}) -{compiling} foo v0.5.0 ({dir})\n", - git = git_project.url(), - compiling = COMPILING, dir = p.url()))); - - // We should be able to update transitive deps - assert_that(p.cargo("update").arg("-p").arg("bar"), - execs().with_stdout(&format!("{} git repository `{}`", - UPDATING, - git_project.url()))); -}); - -test!(dep_with_submodule { - let project = project("foo"); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", r#" - [package] - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - "#) - }).unwrap(); - let git_project2 = git::new("dep2", |project| { - project.file("lib.rs", "pub fn dep() {}") - }).unwrap(); - - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let url = path2url(git_project2.root()).to_string(); - git::add_submodule(&repo, &url, Path::new("src")); - git::commit(&repo); - - let project = project - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - "#, git_project.url())) - .file("src/lib.rs", " - extern crate dep1; - pub fn foo() { dep1::dep() } - "); - - assert_that(project.cargo_process("build"), - execs().with_stderr("").with_status(0)); -}); - -test!(two_deps_only_update_one { - let project = project("foo"); - let git1 = git::new("dep1", |project| { - project - .file("Cargo.toml", r#" - [package] - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - "#) - .file("src/lib.rs", "") - }).unwrap(); - let git2 = git::new("dep2", |project| { - project - .file("Cargo.toml", r#" - [package] - name = "dep2" - version = "0.5.0" - authors = ["carlhuda@example.com"] - "#) - .file("src/lib.rs", "") - }).unwrap(); - - let project = project - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - git = '{}' - [dependencies.dep2] - git = '{}' - "#, git1.url(), git2.url())) - .file("src/main.rs", "fn main() {}"); - - assert_that(project.cargo_process("build"), - execs() - .with_stdout(&format!("{} git repository `[..]`\n\ - {} git repository `[..]`\n\ - {} [..] v0.5.0 ([..])\n\ - {} [..] 
v0.5.0 ([..])\n\ - {} foo v0.5.0 ({})\n", - UPDATING, - UPDATING, - COMPILING, - COMPILING, - COMPILING, project.url())) - .with_stderr("")); - - File::create(&git1.root().join("src/lib.rs")).unwrap().write_all(br#" - pub fn foo() {} - "#).unwrap(); - let repo = git2::Repository::open(&git1.root()).unwrap(); - git::add(&repo); - git::commit(&repo); - - assert_that(project.cargo("update") - .arg("-p").arg("dep1"), - execs() - .with_stdout(&format!("{} git repository `{}`\n\ - {} dep1 v0.5.0 ([..]) -> #[..]\n\ - ", - UPDATING, - git1.url(), - UPDATING)) - .with_stderr("")); -}); - -test!(stale_cached_version { - let bar = git::new("meta-dep", |project| { - project.file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.0" - authors = [] - "#) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }).unwrap(); - - // Update the git database in the cache with the current state of the git - // repo - let foo = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = '{}' - "#, bar.url())) - .file("src/main.rs", r#" - extern crate bar; - - fn main() { assert_eq!(bar::bar(), 1) } - "#); - - assert_that(foo.cargo_process("build"), execs().with_status(0)); - assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); - - // Update the repo, and simulate someone else updating the lockfile and then - // us pulling it down. - File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" - pub fn bar() -> i32 { 1 + 0 } - "#).unwrap(); - let repo = git2::Repository::open(&bar.root()).unwrap(); - git::add(&repo); - git::commit(&repo); - - thread::sleep_ms(1000); - - let rev = repo.revparse_single("HEAD").unwrap().id(); - - File::create(&foo.root().join("Cargo.lock")).unwrap().write_all(format!(r#" - [root] - name = "foo" - version = "0.0.0" - dependencies = [ - 'bar 0.0.0 (git+{url}#{hash})' - ] - - [[package]] - name = "bar" - version = "0.0.0" - source = 'git+{url}#{hash}' - "#, url = bar.url(), hash = rev).as_bytes()).unwrap(); - - // Now build! 
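- // The hand-written lockfile pins a revision that is not yet in the local git cache, so this build must re-fetch the repository.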
- assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{updating} git repository `{bar}` -{compiling} bar v0.0.0 ({bar}#[..]) -{compiling} foo v0.0.0 ({foo}) -", updating = UPDATING, compiling = COMPILING, bar = bar.url(), foo = foo.url()))); - assert_that(foo.process(&foo.bin("foo")), execs().with_status(0)); -}); - -test!(dep_with_changed_submodule { - let project = project("foo"); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", r#" - [package] - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - "#) - }).unwrap(); - - let git_project2 = git::new("dep2", |project| { - project - .file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }") - }).unwrap(); - - let git_project3 = git::new("dep3", |project| { - project - .file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }") - }).unwrap(); - - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), - &Path::new("src")); - git::commit(&repo); - - let project = project - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - [dependencies.dep1] - git = '{}' - "#, git_project.url())) - .file("src/main.rs", " - extern crate dep1; - pub fn main() { println!(\"{}\", dep1::dep()) } - "); - - println!("first run"); - assert_that(project.cargo_process("run"), execs() - .with_stdout(&format!("{} git repository `[..]`\n\ - {} dep1 v0.5.0 ([..])\n\ - {} foo v0.5.0 ([..])\n\ - {} `target[..]foo[..]`\n\ - project2\ - ", - UPDATING, - COMPILING, - COMPILING, - RUNNING)) - .with_stderr("") - .with_status(0)); - - File::create(&git_project.root().join(".gitmodules")).unwrap() - .write_all(format!("[submodule \"src\"]\n\tpath = src\n\turl={}", - git_project3.url()).as_bytes()).unwrap(); - - // Sync the submodule and reset it to the new remote. - sub.sync().unwrap(); - { - let subrepo = sub.open().unwrap(); - let mut origin = subrepo.find_remote("origin").unwrap(); - origin.set_url(&git_project3.url().to_string()).unwrap(); - origin.add_fetch("refs/heads/*:refs/heads/*").unwrap();; - origin.fetch(&[], None).unwrap(); - origin.save().unwrap(); - - let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap(); - let obj = subrepo.find_object(id, None).unwrap(); - subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap(); - } - sub.add_to_index(true).unwrap(); - git::add(&repo); - git::commit(&repo); - - thread::sleep_ms(1000); - // Update the dependency and carry on! 
- println!("update"); - assert_that(project.cargo("update").arg("-v"), - execs() - .with_stderr("") - .with_stdout(&format!("{} git repository `{}`\n\ - {} dep1 v0.5.0 ([..]) -> #[..]\n\ - ", - UPDATING, - git_project.url(), - UPDATING))); - - println!("last run"); - assert_that(project.cargo("run"), execs() - .with_stdout(&format!("{compiling} dep1 v0.5.0 ([..])\n\ - {compiling} foo v0.5.0 ([..])\n\ - {running} `target[..]foo[..]`\n\ - project3\ - ", - compiling = COMPILING, running = RUNNING)) - .with_stderr("") - .with_status(0)); -}); - -test!(dev_deps_with_testing { - let p2 = git::new("bar", |project| { - project.file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", r#" - pub fn gimme() -> &'static str { "zoidberg" } - "#) - }).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.bar] - version = "0.5.0" - git = '{}' - "#, p2.url())) - .file("src/main.rs", r#" - fn main() {} - - #[cfg(test)] - mod tests { - extern crate bar; - #[test] fn foo() { bar::gimme(); } - } - "#); - - // Generate a lockfile which did not use `bar` to compile, but had to update - // `bar` to generate the lockfile - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("\ -{updating} git repository `{bar}` -{compiling} foo v0.5.0 ({url}) -", updating = UPDATING, compiling = COMPILING, url = p.url(), bar = p2.url()))); - - // Make sure we use the previous resolution of `bar` instead of updating it - // a second time. - assert_that(p.cargo("test"), - execs().with_stdout(&format!("\ -{compiling} [..] v0.5.0 ([..]) -{compiling} [..] v0.5.0 ([..] -{running} target[..]foo-[..] - -running 1 test -test tests::foo ... ok - -test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING))); -}); - -test!(git_build_cmd_freshness { - let foo = git::new("foo", |project| { - project.file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - .file(".gitignore", " - src/bar.rs - ") - }).unwrap(); - foo.root().move_into_the_past().unwrap(); - - thread::sleep_ms(1000); - - assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) -", compiling = COMPILING, url = foo.url()))); - - // Smoke test to make sure it doesn't compile again - println!("first pass"); - assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout("")); - - // Modify an ignored file and make sure we don't rebuild - println!("second pass"); - File::create(&foo.root().join("src/bar.rs")).unwrap(); - assert_that(foo.cargo("build"), - execs().with_status(0) - .with_stdout("")); -}); - -test!(git_name_not_always_needed { - let p2 = git::new("bar", |project| { - project.file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", r#" - pub fn gimme() -> &'static str { "zoidberg" } - "#) - }).unwrap(); - - let repo = git2::Repository::open(&p2.root()).unwrap(); - let mut cfg = repo.config().unwrap(); - let _ = cfg.remove("user.name"); - let _ = cfg.remove("user.email"); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dev-dependencies.bar] - git = '{}' - "#, p2.url())) - .file("src/main.rs", "fn main() {}"); - - // Generate a lockfile which did not use `bar` to compile, but had to update - // `bar` to generate the lockfile - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("\ -{updating} git repository `{bar}` -{compiling} foo v0.5.0 ({url}) -", updating = UPDATING, compiling = COMPILING, url = p.url(), bar = p2.url()))); -}); - -test!(git_repo_changing_no_rebuild { - let bar = git::new("bar", |project| { - project.file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }).unwrap(); - - // Lock p1 to the first rev in the git repo - let p1 = project("p1") - .file("Cargo.toml", &format!(r#" - [project] - name = "p1" - version = "0.5.0" - authors = [] - build = 'build.rs' - [dependencies.bar] - git = '{}' - "#, bar.url())) - .file("src/main.rs", "fn main() {}") - .file("build.rs", "fn main() {}"); - p1.build(); - p1.root().move_into_the_past().unwrap(); - assert_that(p1.cargo("build"), - execs().with_stdout(&format!("\ -{updating} git repository `{bar}` -{compiling} [..] -{compiling} [..] 
-", updating = UPDATING, compiling = COMPILING, bar = bar.url()))); - - // Make a commit to lock p2 to a different rev - File::create(&bar.root().join("src/lib.rs")).unwrap().write_all(br#" - pub fn bar() -> i32 { 2 } - "#).unwrap(); - let repo = git2::Repository::open(&bar.root()).unwrap(); - git::add(&repo); - git::commit(&repo); - - // Lock p2 to the second rev - let p2 = project("p2") - .file("Cargo.toml", &format!(r#" - [project] - name = "p2" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - "#, bar.url())) - .file("src/main.rs", "fn main() {}"); - assert_that(p2.cargo_process("build"), - execs().with_stdout(&format!("\ -{updating} git repository `{bar}` -{compiling} [..] -{compiling} [..] -", updating = UPDATING, compiling = COMPILING, bar = bar.url()))); - - // And now for the real test! Make sure that p1 doesn't get rebuilt - // even though the git repo has changed. - assert_that(p1.cargo("build"), - execs().with_stdout("")); -}); - -test!(git_dep_build_cmd { - let p = git::new("foo", |project| { - project.file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [lib] - - name = "bar" - "#) - .file("bar/src/bar.rs.in", r#" - pub fn gimme() -> i32 { 0 } - "#) - .file("bar/build.rs", r#" - use std::fs; - fn main() { - fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); - } - "#) - }).unwrap(); - - p.root().join("bar").move_into_the_past().unwrap(); - - assert_that(p.cargo("build"), - execs().with_status(0)); - - assert_that(cargo::util::process(&p.bin("foo")).unwrap(), - execs().with_stdout("0\n")); - - // Touching bar.rs.in should cause the `build` command to run again. 
- fs::File::create(&p.root().join("bar/src/bar.rs.in")).unwrap() - .write_all(b"pub fn gimme() -> i32 { 1 }").unwrap(); - - assert_that(p.cargo("build"), - execs().with_status(0)); - - assert_that(cargo::util::process(&p.bin("foo")).unwrap(), - execs().with_stdout("1\n")); -}); - -test!(fetch_downloads { - let bar = git::new("bar", |project| { - project.file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }).unwrap(); - - let p = project("p1") - .file("Cargo.toml", &format!(r#" - [project] - name = "p1" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - "#, bar.url())) - .file("src/main.rs", "fn main() {}"); - assert_that(p.cargo_process("fetch"), - execs().with_status(0).with_stdout(&format!("\ -{updating} git repository `{url}` -", updating = UPDATING, url = bar.url()))); - - assert_that(p.cargo("fetch"), - execs().with_status(0).with_stdout("")); -}); - -test!(warnings_in_git_dep { - let bar = git::new("bar", |project| { - project.file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", "fn unused() {}") - }).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - "#, bar.url())) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("build"), - execs() - .with_stdout(&format!("{} git repository `{}`\n\ - {} bar v0.5.0 ({}#[..])\n\ - {} foo v0.5.0 ({})\n", - UPDATING, bar.url(), - COMPILING, bar.url(), - COMPILING, p.url())) - .with_stderr("")); -}); - -test!(update_ambiguous { - let foo1 = git::new("foo1", |project| { - project.file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", "") - }).unwrap(); - let foo2 = git::new("foo2", |project| { - project.file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.6.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", "") - }).unwrap(); - let bar = git::new("bar", |project| { - project.file("Cargo.toml", &format!(r#" - [package] - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.foo] - git = '{}' - "#, foo2.url())) - .file("src/lib.rs", "") - }).unwrap(); - - let p = project("project") - .file("Cargo.toml", &format!(r#" - [project] - name = "project" - version = "0.5.0" - authors = [] - [dependencies.foo] - git = '{}' - [dependencies.bar] - git = '{}' - "#, foo1.url(), bar.url())) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0)); - assert_that(p.cargo("update") - .arg("-p").arg("foo"), - execs().with_status(101) - .with_stderr("\ -There are multiple `foo` packages in your project, and the specification `foo` \ -is ambiguous. 
-Please re-run this command with `-p <spec>` where `<spec>` is one of the \
-following:
-  foo:0.[..].0
-  foo:0.[..].0
-"));
-});
-
-test!(update_one_dep_in_repo_with_many_deps {
-    let foo = git::new("foo", |project| {
-        project.file("Cargo.toml", r#"
-            [package]
-            name = "foo"
-            version = "0.5.0"
-            authors = ["wycats@example.com"]
-        "#)
-        .file("src/lib.rs", "")
-        .file("a/Cargo.toml", r#"
-            [package]
-            name = "a"
-            version = "0.5.0"
-            authors = ["wycats@example.com"]
-        "#)
-        .file("a/src/lib.rs", "")
-    }).unwrap();
-
-    let p = project("project")
-        .file("Cargo.toml", &format!(r#"
-            [project]
-            name = "project"
-            version = "0.5.0"
-            authors = []
-            [dependencies.foo]
-            git = '{}'
-            [dependencies.a]
-            git = '{}'
-        "#, foo.url(), foo.url()))
-        .file("src/main.rs", "fn main() {}");
-
-    assert_that(p.cargo_process("generate-lockfile"), execs().with_status(0));
-    assert_that(p.cargo("update")
-                 .arg("-p").arg("foo"),
-                execs().with_status(0)
-                       .with_stdout(&format!("\
-Updating git repository `{}`
-", foo.url())));
-});
-
-test!(switch_deps_does_not_update_transitive {
-    let transitive = git::new("transitive", |project| {
-        project.file("Cargo.toml", r#"
-            [package]
-            name = "transitive"
-            version = "0.5.0"
-            authors = ["wycats@example.com"]
-        "#)
-        .file("src/lib.rs", "")
-    }).unwrap();
-    let dep1 = git::new("dep1", |project| {
-        project.file("Cargo.toml", &format!(r#"
-            [package]
-            name = "dep"
-            version = "0.5.0"
-            authors = ["wycats@example.com"]
-
-            [dependencies.transitive]
-            git = '{}'
-        "#, transitive.url()))
-        .file("src/lib.rs", "")
-    }).unwrap();
-    let dep2 = git::new("dep2", |project| {
-        project.file("Cargo.toml", &format!(r#"
-            [package]
-            name = "dep"
-            version = "0.5.0"
-            authors = ["wycats@example.com"]
-
-            [dependencies.transitive]
-            git = '{}'
-        "#, transitive.url()))
-        .file("src/lib.rs", "")
-    }).unwrap();
-
-    let p = project("project")
-        .file("Cargo.toml", &format!(r#"
-            [project]
-            name = "project"
-            version = "0.5.0"
-            authors = []
-            [dependencies.dep]
-            git = '{}'
-        "#, dep1.url()))
-        .file("src/main.rs", "fn main() {}");
-
-    p.build();
-    assert_that(p.cargo("build"),
-                execs().with_status(0)
-                       .with_stdout(&format!("\
-Updating git repository `{}`
-Updating git repository `{}`
-{compiling} transitive [..]
-{compiling} dep [..]
-{compiling} project [..]
-", dep1.url(), transitive.url(), compiling = COMPILING)));
-
-    // Update the dependency to point to the second repository, but this
-    // shouldn't update the transitive dependency which is the same.
-    File::create(&p.root().join("Cargo.toml")).unwrap().write_all(format!(r#"
-        [project]
-        name = "project"
-        version = "0.5.0"
-        authors = []
-        [dependencies.dep]
-        git = '{}'
-    "#, dep2.url()).as_bytes()).unwrap();
-
-    assert_that(p.cargo("build"),
-                execs().with_status(0)
-                       .with_stdout(&format!("\
-Updating git repository `{}`
-{compiling} dep [..]
-{compiling} project [..]
-", dep2.url(), compiling = COMPILING))); -}); - -test!(update_one_source_updates_all_packages_in_that_git_source { - let dep = git::new("dep", |project| { - project.file("Cargo.toml", r#" - [package] - name = "dep" - version = "0.5.0" - authors = [] - - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("a/Cargo.toml", r#" - [package] - name = "a" - version = "0.5.0" - authors = [] - "#) - .file("a/src/lib.rs", "") - }).unwrap(); - - let p = project("project") - .file("Cargo.toml", &format!(r#" - [project] - name = "project" - version = "0.5.0" - authors = [] - [dependencies.dep] - git = '{}' - "#, dep.url())) - .file("src/main.rs", "fn main() {}"); - - p.build(); - assert_that(p.cargo("build"), - execs().with_status(0)); - - let repo = git2::Repository::open(&dep.root()).unwrap(); - let rev1 = repo.revparse_single("HEAD").unwrap().id(); - - // Just be sure to change a file - File::create(&dep.root().join("src/lib.rs")).unwrap().write_all(br#" - pub fn bar() -> i32 { 2 } - "#).unwrap(); - git::add(&repo); - git::commit(&repo); - - assert_that(p.cargo("update").arg("-p").arg("dep"), - execs().with_status(0)); - let mut lockfile = String::new(); - File::open(&p.root().join("Cargo.lock")).unwrap() - .read_to_string(&mut lockfile).unwrap(); - assert!(!lockfile.contains(&rev1.to_string()), - "{} in {}", rev1, lockfile); -}); - -test!(switch_sources { - let a1 = git::new("a1", |project| { - project.file("Cargo.toml", r#" - [package] - name = "a" - version = "0.5.0" - authors = [] - "#) - .file("src/lib.rs", "") - }).unwrap(); - let a2 = git::new("a2", |project| { - project.file("Cargo.toml", r#" - [package] - name = "a" - version = "0.5.1" - authors = [] - "#) - .file("src/lib.rs", "") - }).unwrap(); - - let p = project("project") - .file("Cargo.toml", r#" - [project] - name = "project" - version = "0.5.0" - authors = [] - [dependencies.b] - path = "b" - "#) - .file("src/main.rs", "fn main() {}") - .file("b/Cargo.toml", &format!(r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies.a] - git = '{}' - "#, a1.url())) - .file("b/src/lib.rs", "fn main() {}"); - - p.build(); - assert_that(p.cargo("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{updating} git repository `file://[..]a1` -{compiling} a v0.5.0 ([..]a1#[..] -{compiling} b v0.5.0 ([..]) -{compiling} project v0.5.0 ([..]) -", updating = UPDATING, compiling = COMPILING))); - - File::create(&p.root().join("b/Cargo.toml")).unwrap().write_all(format!(r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies.a] - git = '{}' - "#, a2.url()).as_bytes()).unwrap(); - - assert_that(p.cargo("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{updating} git repository `file://[..]a2` -{compiling} a v0.5.1 ([..]a2#[..] 
-{compiling} b v0.5.0 ([..]) -{compiling} project v0.5.0 ([..]) -", updating = UPDATING, compiling = COMPILING))); -}); - -test!(dont_require_submodules_are_checked_out { - let project = project("foo"); - let git1 = git::new("dep1", |p| { - p.file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .file("a/foo", "") - }).unwrap(); - let git2 = git::new("dep2", |p| p).unwrap(); - - let repo = git2::Repository::open(&git1.root()).unwrap(); - let url = path2url(git2.root()).to_string(); - git::add_submodule(&repo, &url, &Path::new("a/submodule")); - git::commit(&repo); - - git2::Repository::init(&project.root()).unwrap(); - let url = path2url(git1.root()).to_string(); - let dst = paths::home().join("foo"); - git2::Repository::clone(&url, &dst).unwrap(); - - assert_that(git1.cargo("build").arg("-v").cwd(&dst), - execs().with_status(0)); -}); - -test!(doctest_same_name { - let a2 = git::new("a2", |p| { - p.file("Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - "#) - .file("src/lib.rs", "pub fn a2() {}") - }).unwrap(); - - let a1 = git::new("a1", |p| { - p.file("Cargo.toml", &format!(r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - [dependencies] - a = {{ git = '{}' }} - "#, a2.url())) - .file("src/lib.rs", "extern crate a; pub fn a1() {}") - }).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ git = '{}' }} - "#, a1.url())) - .file("src/lib.rs", r#" - #[macro_use] - extern crate a; - "#); - - assert_that(p.cargo_process("test").arg("-v"), - execs().with_status(0)); -}); - -test!(lints_are_suppressed { - let a = git::new("a", |p| { - p.file("Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - "#) - .file("src/lib.rs", " - use std::option; - ") - }).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ git = '{}' }} - "#, a.url())) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} git repository `[..]` -{compiling} a v0.5.0 ([..]) -{compiling} foo v0.0.1 ([..]) -", compiling = COMPILING, updating = UPDATING))); -}); - -test!(denied_lints_are_allowed { - let enabled = super::RUSTC.with(|r| r.cap_lints); - if !enabled { return } - - let a = git::new("a", |p| { - p.file("Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - "#) - .file("src/lib.rs", " - #![deny(warnings)] - use std::option; - ") - }).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ git = '{}' }} - "#, a.url())) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} git repository `[..]` -{compiling} a v0.5.0 ([..]) -{compiling} foo v0.0.1 ([..]) -", compiling = COMPILING, updating = UPDATING))); -}); diff --git a/tests/test_cargo_compile_path_deps.rs b/tests/test_cargo_compile_path_deps.rs deleted file mode 100644 index 87174cc4220..00000000000 --- a/tests/test_cargo_compile_path_deps.rs +++ /dev/null @@ -1,836 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use std::thread; - -use support::{project, execs, main_file}; -use 
support::{COMPILING, RUNNING}; -use support::paths::{self, CargoPathExt}; -use hamcrest::{assert_that, existing_file}; -use cargo; -use cargo::util::{process}; - -fn setup() { -} - -test!(cargo_compile_with_nested_deps_shorthand { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - - version = "0.5.0" - path = "baz" - - [lib] - - name = "bar" - "#) - .file("bar/src/bar.rs", r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#) - .file("bar/baz/Cargo.toml", r#" - [project] - - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "baz" - "#) - .file("bar/baz/src/baz.rs", r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#); - - assert_that(p.cargo_process("build"), - execs().with_status(0) - .with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url(), - COMPILING, p.url()))); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that(cargo::util::process(&p.bin("foo")).unwrap(), - execs().with_stdout("test passed\n").with_status(0)); - - println!("cleaning"); - assert_that(p.cargo("clean"), - execs().with_stdout("").with_status(0)); - println!("building baz"); - assert_that(p.cargo("build").arg("-p").arg("baz"), - execs().with_status(0) - .with_stdout(&format!("{} baz v0.5.0 ({})\n", - COMPILING, p.url()))); - println!("building foo"); - assert_that(p.cargo("build") - .arg("-p").arg("foo"), - execs().with_status(0) - .with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url()))); -}); - -test!(cargo_compile_with_root_dev_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.bar] - - version = "0.5.0" - path = "../bar" - - [[bin]] - name = "foo" - "#) - .file("src/main.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])); - let p2 = project("bar") - .file("Cargo.toml", r#" - [package] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", r#" - pub fn gimme() -> &'static str { - "zoidberg" - } - "#); - - p2.build(); - assert_that(p.cargo_process("build"), - execs().with_status(101)) -}); - -test!(cargo_compile_with_root_dev_deps_with_testing { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.bar] - - version = "0.5.0" - path = "../bar" - - [[bin]] - name = "foo" - "#) - .file("src/main.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])); - let p2 = project("bar") - .file("Cargo.toml", r#" - [package] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", r#" - pub fn gimme() -> &'static str { - "zoidberg" - } - "#); - - p2.build(); - assert_that(p.cargo_process("test"), - execs().with_stdout(&format!("\ -{compiling} [..] v0.5.0 ({url}) -{compiling} [..] v0.5.0 ({url}) -{running} target[..]foo-[..] - -running 0 tests - -test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, url = p.url(), running = RUNNING))); -}); - -test!(cargo_compile_with_transitive_dev_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.baz] - - git = "git://example.com/path/to/nowhere" - - [lib] - - name = "bar" - "#) - .file("bar/src/bar.rs", r#" - pub fn gimme() -> &'static str { - "zoidberg" - } - "#); - - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url()))); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that(cargo::util::process(&p.bin("foo")).unwrap(), - execs().with_stdout("zoidberg\n")); -}); - -test!(no_rebuild_dependency { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [[bin]] name = "foo" - [dependencies.bar] path = "bar" - "#) - .file("src/foo.rs", r#" - extern crate bar; - fn main() { bar::bar() } - "#) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] name = "bar" - "#) - .file("bar/src/bar.rs", r#" - pub fn bar() {} - "#); - // First time around we should compile both foo and bar - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url()))); - // This time we shouldn't compile bar - assert_that(p.cargo("build"), - execs().with_stdout("")); - p.root().move_into_the_past().unwrap(); - - p.build(); // rebuild the files (rewriting them in the process) - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url()))); -}); - -test!(deep_dependencies_trigger_rebuild { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [[bin]] - name = "foo" - [dependencies.bar] - path = "bar" - "#) - .file("src/foo.rs", r#" - extern crate bar; - fn main() { bar::bar() } - "#) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - name = "bar" - [dependencies.baz] - path = "../baz" - "#) - .file("bar/src/bar.rs", r#" - extern crate baz; - pub fn bar() { baz::baz() } - "#) - .file("baz/Cargo.toml", r#" - [project] - - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - name = "baz" - "#) - .file("baz/src/baz.rs", r#" - pub fn baz() {} - "#); - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url(), - COMPILING, p.url()))); - assert_that(p.cargo("build"), - execs().with_stdout("")); - - // Make sure an update to baz triggers a rebuild of bar - // - // We base recompilation off mtime, so sleep for at least a second to ensure - // that this write will change the mtime. 
- thread::sleep_ms(1000); - File::create(&p.root().join("baz/src/baz.rs")).unwrap().write_all(br#" - pub fn baz() { println!("hello!"); } - "#).unwrap(); - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url(), - COMPILING, p.url()))); - - // Make sure an update to bar doesn't trigger baz - thread::sleep_ms(1000); - File::create(&p.root().join("bar/src/bar.rs")).unwrap().write_all(br#" - extern crate baz; - pub fn bar() { println!("hello!"); baz::baz(); } - "#).unwrap(); - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url()))); - -}); - -test!(no_rebuild_two_deps { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [[bin]] - name = "foo" - [dependencies.bar] - path = "bar" - [dependencies.baz] - path = "baz" - "#) - .file("src/foo.rs", r#" - extern crate bar; - fn main() { bar::bar() } - "#) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - name = "bar" - [dependencies.baz] - path = "../baz" - "#) - .file("bar/src/bar.rs", r#" - pub fn bar() {} - "#) - .file("baz/Cargo.toml", r#" - [project] - - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - name = "baz" - "#) - .file("baz/src/baz.rs", r#" - pub fn baz() {} - "#); - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} baz v0.5.0 ({})\n\ - {} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url(), - COMPILING, p.url()))); - assert_that(&p.bin("foo"), existing_file()); - assert_that(p.cargo("build"), - execs().with_stdout("")); - assert_that(&p.bin("foo"), existing_file()); -}); - -test!(nested_deps_recompile { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "src/bar" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("src/bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "bar" - "#) - .file("src/bar/src/bar.rs", "pub fn gimme() {}"); - let bar = p.url(); - - assert_that(p.cargo_process("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, bar, - COMPILING, p.url()))); - p.root().move_into_the_past().unwrap(); - - File::create(&p.root().join("src/foo.rs")).unwrap().write_all(br#" - fn main() {} - "#).unwrap(); - - // This shouldn't recompile `bar` - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} foo v0.5.0 ({})\n", - COMPILING, p.url()))); -}); - -test!(error_message_for_missing_manifest { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - path = "src/bar" - - [lib] - - name = "foo" - "#) - .file("src/bar/not-a-manifest", ""); - - assert_that(p.cargo_process("build"), - execs() - .with_status(101) - .with_stderr(&format!("Could not find `Cargo.toml` in `{}`\n", - p.root().join("src").join("bar").display()))); - -}); - -test!(override_relative { - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - - name = 
"bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", ""); - - fs::create_dir(&paths::root().join(".cargo")).unwrap(); - File::create(&paths::root().join(".cargo/config")).unwrap() - .write_all(br#"paths = ["bar"]"#).unwrap(); - - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [package] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = '{}' - "#, bar.root().display())) - .file("src/lib.rs", ""); - bar.build(); - assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); - -}); - -test!(override_self { - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("src/lib.rs", ""); - - let p = project("foo"); - let root = p.root().clone(); - let p = p - .file(".cargo/config", &format!(r#" - paths = ['{}'] - "#, root.display())) - .file("Cargo.toml", &format!(r#" - [package] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = '{}' - - "#, bar.root().display())) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}"); - - bar.build(); - assert_that(p.cargo_process("build"), execs().with_status(0)); - -}); - -test!(override_path_dep { - let bar = project("bar") - .file("p1/Cargo.toml", r#" - [package] - name = "p1" - version = "0.5.0" - authors = [] - - [dependencies.p2] - path = "../p2" - "#) - .file("p1/src/lib.rs", "") - .file("p2/Cargo.toml", r#" - [package] - name = "p2" - version = "0.5.0" - authors = [] - "#) - .file("p2/src/lib.rs", ""); - - let p = project("foo") - .file(".cargo/config", &format!(r#" - paths = ['{}', '{}'] - "#, bar.root().join("p1").display(), - bar.root().join("p2").display())) - .file("Cargo.toml", &format!(r#" - [package] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.p2] - path = '{}' - - "#, bar.root().join("p2").display())) - .file("src/lib.rs", ""); - - bar.build(); - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - -}); - -test!(path_dep_build_cmd { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - - [[bin]] - - name = "foo" - "#) - .file("src/foo.rs", - &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [lib] - - name = "bar" - "#) - .file("bar/build.rs", r#" - use std::fs; - fn main() { - fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); - } - "#) - .file("bar/src/bar.rs.in", r#" - pub fn gimme() -> i32 { 0 } - "#); - - p.build(); - p.root().join("bar").move_into_the_past().unwrap(); - - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url()))); - - assert_that(&p.bin("foo"), existing_file()); - - assert_that(cargo::util::process(&p.bin("foo")).unwrap(), - execs().with_stdout("0\n")); - - // Touching bar.rs.in should cause the `build` command to run again. 
- { - let file = fs::File::create(&p.root().join("bar/src/bar.rs.in")); - file.unwrap().write_all(br#"pub fn gimme() -> i32 { 1 }"#).unwrap(); - } - - assert_that(p.cargo("build"), - execs().with_stdout(&format!("{} bar v0.5.0 ({})\n\ - {} foo v0.5.0 ({})\n", - COMPILING, p.url(), - COMPILING, p.url()))); - - assert_that(cargo::util::process(&p.bin("foo")).unwrap(), - execs().with_stdout("1\n")); -}); - -test!(dev_deps_no_rebuild_lib { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dev-dependencies.bar] - path = "bar" - - [lib] - name = "foo" - doctest = false - "#) - .file("src/lib.rs", r#" - #[cfg(test)] extern crate bar; - #[cfg(not(test))] fn foo() { env!("FOO"); } - "#) - .file("bar/Cargo.toml", r#" - [package] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - "#) - .file("bar/src/lib.rs", "pub fn bar() {}"); - p.build(); - assert_that(p.cargo("build") - .env("FOO", "bar"), - execs().with_status(0) - .with_stdout(&format!("{} foo v0.5.0 ({})\n", - COMPILING, p.url()))); - - assert_that(p.cargo("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{} [..] v0.5.0 ({}) -{} [..] v0.5.0 ({}) -Running target[..]foo-[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", COMPILING, p.url(), COMPILING, p.url()))); -}); - -test!(custom_target_no_rebuild { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies] - a = { path = "a" } - "#) - .file("src/lib.rs", "") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - "#) - .file("a/src/lib.rs", "") - .file("b/Cargo.toml", r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies] - a = { path = "../a" } - "#) - .file("b/src/lib.rs", ""); - p.build(); - assert_that(p.cargo("build"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} a v0.5.0 ([..]) -{compiling} foo v0.5.0 ([..]) -", compiling = COMPILING))); - - assert_that(p.cargo("build") - .arg("--manifest-path=b/Cargo.toml") - .env("CARGO_TARGET_DIR", "target"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} b v0.5.0 ([..]) -", compiling = COMPILING))); -}); - -test!(override_and_depend { - let p = project("foo") - .file("a/a1/Cargo.toml", r#" - [project] - name = "a1" - version = "0.5.0" - authors = [] - [dependencies] - a2 = { path = "../a2" } - "#) - .file("a/a1/src/lib.rs", "") - .file("a/a2/Cargo.toml", r#" - [project] - name = "a2" - version = "0.5.0" - authors = [] - "#) - .file("a/a2/src/lib.rs", "") - .file("b/Cargo.toml", r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies] - a1 = { path = "../a/a1" } - a2 = { path = "../a/a2" } - "#) - .file("b/src/lib.rs", "") - .file("b/.cargo/config", r#" - paths = ["../a"] - "#); - p.build(); - assert_that(p.cargo("build").cwd(p.root().join("b")), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} a2 v0.5.0 ([..]) -{compiling} a1 v0.5.0 ([..]) -{compiling} b v0.5.0 ([..]) -", compiling = COMPILING))); -}); diff --git a/tests/test_cargo_compile_plugins.rs b/tests/test_cargo_compile_plugins.rs deleted file mode 100644 index 3d90bd10645..00000000000 --- a/tests/test_cargo_compile_plugins.rs +++ /dev/null @@ -1,272 +0,0 @@ -use std::fs; -use std::env; - -use support::{project, execs}; -use support::{COMPILING, RUNNING}; -use hamcrest::assert_that; - -fn setup() { -} - -test!(plugin_to_the_max { - if 
!::is_nightly() { return } - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo_lib" - - [dependencies.bar] - path = "../bar" - "#) - .file("src/main.rs", r#" - #![feature(plugin)] - #![plugin(bar)] - extern crate foo_lib; - - fn main() { foo_lib::foo(); } - "#) - .file("src/foo_lib.rs", r#" - #![feature(plugin)] - #![plugin(bar)] - - pub fn foo() {} - "#); - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - plugin = true - - [dependencies.baz] - path = "../baz" - "#) - .file("src/lib.rs", r#" - #![feature(plugin_registrar, rustc_private)] - - extern crate rustc; - extern crate baz; - - use rustc::plugin::Registry; - - #[plugin_registrar] - pub fn foo(_reg: &mut Registry) { - println!("{}", baz::baz()); - } - "#); - let baz = project("baz") - .file("Cargo.toml", r#" - [package] - name = "baz" - version = "0.0.1" - authors = [] - - [lib] - name = "baz" - crate_type = ["dylib"] - "#) - .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); - bar.build(); - baz.build(); - - assert_that(foo.cargo_process("build"), - execs().with_status(0)); - assert_that(foo.cargo("doc"), - execs().with_status(0)); -}); - -test!(plugin_with_dynamic_native_dependency { - if !::is_nightly() { return } - - let build = project("builder") - .file("Cargo.toml", r#" - [package] - name = "builder" - version = "0.0.1" - authors = [] - - [lib] - name = "builder" - crate-type = ["dylib"] - "#) - .file("src/lib.rs", r#" - #[no_mangle] - pub extern fn foo() {} - "#); - assert_that(build.cargo_process("build"), - execs().with_status(0)); - let src = build.root().join("target/debug"); - let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| { - let lib = lib.file_name().unwrap().to_str().unwrap(); - lib.starts_with(env::consts::DLL_PREFIX) && - lib.ends_with(env::consts::DLL_SUFFIX) - }).unwrap(); - let libname = lib.file_name().unwrap().to_str().unwrap(); - let libname = &libname[env::consts::DLL_PREFIX.len().. 
- libname.len() - env::consts::DLL_SUFFIX.len()]; - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", r#" - #![feature(plugin)] - #![plugin(bar)] - - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = 'build.rs' - - [lib] - name = "bar" - plugin = true - "#) - .file("bar/build.rs", r#" - use std::path::PathBuf; - use std::env; - - fn main() { - let src = PathBuf::from(env::var("SRC").unwrap()); - println!("cargo:rustc-flags=-L {}", src.parent().unwrap() - .display()); - } - "#) - .file("bar/src/lib.rs", &format!(r#" - #![feature(plugin_registrar, rustc_private)] - extern crate rustc; - - use rustc::plugin::Registry; - - #[link(name = "{}")] - extern {{ fn foo(); }} - - #[plugin_registrar] - pub fn bar(_reg: &mut Registry) {{ - unsafe {{ foo() }} - }} - "#, libname)); - - assert_that(foo.cargo_process("build").env("SRC", &lib), - execs().with_status(0)); -}); - -test!(plugin_integration { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [lib] - name = "foo" - plugin = true - doctest = false - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .file("tests/it_works.rs", ""); - - assert_that(p.cargo_process("test"), - execs().with_status(0)); -}); - -test!(doctest_a_plugin { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar" } - "#) - .file("src/lib.rs", r#" - #[macro_use] - extern crate bar; - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - plugin = true - "#) - .file("bar/src/lib.rs", r#" - pub fn bar() {} - "#); - - assert_that(p.cargo_process("test").arg("-v"), - execs().with_status(0)); -}); - -// See #1515 -test!(native_plugin_dependency_with_custom_ar_linker { - let target = ::rustc_host(); - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - plugin = true - "#) - .file("src/lib.rs", ""); - - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - - [dependencies.foo] - path = "../foo" - "#) - .file("src/lib", "") - .file(".cargo/config", &format!(r#" - [target.{}] - ar = "nonexistent-ar" - linker = "nonexistent-linker" - "#, target)); - - foo.build(); - assert_that(bar.cargo_process("build").arg("--verbose"), - execs().with_stdout(&format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc [..] 
-C ar=nonexistent-ar -C linker=nonexistent-linker [..]` -", compiling = COMPILING, running = RUNNING, url = bar.url()))) -}); diff --git a/tests/test_cargo_cross_compile.rs b/tests/test_cargo_cross_compile.rs deleted file mode 100644 index a7c42b216cd..00000000000 --- a/tests/test_cargo_cross_compile.rs +++ /dev/null @@ -1,866 +0,0 @@ -use std::env; - -use support::{project, execs, basic_bin_manifest}; -use support::{RUNNING, COMPILING, DOCTEST}; -use hamcrest::{assert_that, existing_file}; -use cargo::util::process; - -fn setup() { -} - -fn disabled() -> bool { - // First, disable if ./configure requested so - match env::var("CFG_DISABLE_CROSS_TESTS") { - Ok(ref s) if *s == "1" => return true, - _ => {} - } - - // Right now the windows bots cannot cross compile due to the mingw setup, - // so we disable ourselves on all but macos/linux setups where the rustc - // install script ensures we have both architectures - !(cfg!(target_os = "macos") || - cfg!(target_os = "linux") || - cfg!(target_env = "msvc")) -} - -fn alternate() -> String { - let platform = match env::consts::OS { - "linux" => "unknown-linux-gnu", - "macos" => "apple-darwin", - "windows" => "pc-windows-msvc", - _ => unreachable!(), - }; - let arch = match env::consts::ARCH { - "x86" => "x86_64", - "x86_64" => "i686", - _ => unreachable!(), - }; - format!("{}-{}", arch, platform) -} - -fn alternate_arch() -> &'static str { - match env::consts::ARCH { - "x86" => "x86_64", - "x86_64" => "x86", - _ => unreachable!(), - } -} - -test!(simple_cross { - if disabled() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - "#) - .file("build.rs", &format!(r#" - fn main() {{ - assert_eq!(std::env::var("TARGET").unwrap(), "{}"); - }} - "#, alternate())) - .file("src/main.rs", &format!(r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, alternate_arch())); - - let target = alternate(); - assert_that(p.cargo_process("build").arg("--target").arg(&target), - execs().with_status(0)); - assert_that(&p.target_bin(&target, "foo"), existing_file()); - - assert_that(process(&p.target_bin(&target, "foo")).unwrap(), - execs().with_status(0)); -}); - -test!(simple_deps { - if disabled() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#) - .file("src/main.rs", r#" - extern crate bar; - fn main() { bar::bar(); } - "#); - let p2 = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "pub fn bar() {}"); - p2.build(); - - let target = alternate(); - assert_that(p.cargo_process("build").arg("--target").arg(&target), - execs().with_status(0)); - assert_that(&p.target_bin(&target, "foo"), existing_file()); - - assert_that(process(&p.target_bin(&target, "foo")).unwrap(), - execs().with_status(0)); -}); - -test!(plugin_deps { - if disabled() { return } - if !::is_nightly() { return } - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - - [dependencies.baz] - path = "../baz" - "#) - .file("src/main.rs", r#" - #![feature(plugin)] - #![plugin(bar)] - extern crate baz; - fn main() { - assert_eq!(bar!(), baz::baz()); - } - "#); - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] 
-
-            [lib]
-            name = "bar"
-            plugin = true
-        "#)
-        .file("src/lib.rs", r#"
-            #![feature(plugin_registrar, quote, rustc_private)]
-
-            extern crate rustc;
-            extern crate syntax;
-
-            use rustc::plugin::Registry;
-            use syntax::ast::TokenTree;
-            use syntax::codemap::Span;
-            use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
-
-            #[plugin_registrar]
-            pub fn foo(reg: &mut Registry) {
-                reg.register_macro("bar", expand_bar);
-            }
-
-            fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
-                          -> Box<MacResult + 'static> {
-                MacEager::expr(quote_expr!(cx, 1))
-            }
-        "#);
-    let baz = project("baz")
-        .file("Cargo.toml", r#"
-            [package]
-            name = "baz"
-            version = "0.0.1"
-            authors = []
-        "#)
-        .file("src/lib.rs", "pub fn baz() -> i32 { 1 }");
-    bar.build();
-    baz.build();
-
-    let target = alternate();
-    assert_that(foo.cargo_process("build").arg("--target").arg(&target),
-                execs().with_status(0));
-    assert_that(&foo.target_bin(&target, "foo"), existing_file());
-
-    assert_that(process(&foo.target_bin(&target, "foo")).unwrap(),
-                execs().with_status(0));
-});
-
-test!(plugin_to_the_max {
-    if disabled() { return }
-    if !::is_nightly() { return }
-
-    let foo = project("foo")
-        .file("Cargo.toml", r#"
-            [package]
-            name = "foo"
-            version = "0.0.1"
-            authors = []
-
-            [dependencies.bar]
-            path = "../bar"
-
-            [dependencies.baz]
-            path = "../baz"
-        "#)
-        .file("src/main.rs", r#"
-            #![feature(plugin)]
-            #![plugin(bar)]
-            extern crate baz;
-            fn main() {
-                assert_eq!(bar!(), baz::baz());
-            }
-        "#);
-    let bar = project("bar")
-        .file("Cargo.toml", r#"
-            [package]
-            name = "bar"
-            version = "0.0.1"
-            authors = []
-
-            [lib]
-            name = "bar"
-            plugin = true
-
-            [dependencies.baz]
-            path = "../baz"
-        "#)
-        .file("src/lib.rs", r#"
-            #![feature(plugin_registrar, quote, rustc_private)]
-
-            extern crate rustc;
-            extern crate syntax;
-            extern crate baz;
-
-            use rustc::plugin::Registry;
-            use syntax::ast::TokenTree;
-            use syntax::codemap::Span;
-            use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
-
-            #[plugin_registrar]
-            pub fn foo(reg: &mut Registry) {
-                reg.register_macro("bar", expand_bar);
-            }
-
-            fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
-                          -> Box<MacResult + 'static> {
-                MacEager::expr(quote_expr!(cx, baz::baz()))
-            }
-        "#);
-    let baz = project("baz")
-        .file("Cargo.toml", r#"
-            [package]
-            name = "baz"
-            version = "0.0.1"
-            authors = []
-        "#)
-        .file("src/lib.rs", "pub fn baz() -> i32 { 1 }");
-    bar.build();
-    baz.build();
-
-    let target = alternate();
-    assert_that(foo.cargo_process("build").arg("--target").arg(&target).arg("-v"),
-                execs().with_status(0));
-    println!("second");
-    assert_that(foo.cargo("build").arg("-v")
-                   .arg("--target").arg(&target),
-                execs().with_status(0));
-    assert_that(&foo.target_bin(&target, "foo"), existing_file());
-
-    assert_that(process(&foo.target_bin(&target, "foo")).unwrap(),
-                execs().with_status(0));
-});
-
-test!(linker_and_ar {
-    if disabled() { return }
-
-    let target = alternate();
-    let p = project("foo")
-        .file(".cargo/config", &format!(r#"
-            [target.{}]
-            ar = "my-ar-tool"
-            linker = "my-linker-tool"
-        "#, target))
-        .file("Cargo.toml", &basic_bin_manifest("foo"))
-        .file("src/foo.rs", &format!(r#"
-            use std::env;
-            fn main() {{
-                assert_eq!(env::consts::ARCH, "{}");
-            }}
-        "#, alternate_arch()));
-
-    assert_that(p.cargo_process("build").arg("--target").arg(&target)
-                                        .arg("-v"),
-                execs().with_status(101)
-                       .with_stdout(&format!("\
-{compiling} foo v0.5.0 ({url})
-{running} `rustc src[..]foo.rs --crate-name foo --crate-type bin -g \
-        --out-dir {dir}[..]target[..]{target}[..]debug \
-
--emit=dep-info,link \ - --target {target} \ - -C ar=my-ar-tool -C linker=my-linker-tool \ - -L dependency={dir}[..]target[..]{target}[..]debug \ - -L dependency={dir}[..]target[..]{target}[..]debug[..]deps` -", - running = RUNNING, - compiling = COMPILING, - dir = p.root().display(), - url = p.url(), - target = target, - ))); -}); - -test!(plugin_with_extra_dylib_dep { - if disabled() { return } - if !::is_nightly() { return } - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#) - .file("src/main.rs", r#" - #![feature(plugin)] - #![plugin(bar)] - - fn main() {} - "#); - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - plugin = true - - [dependencies.baz] - path = "../baz" - "#) - .file("src/lib.rs", r#" - #![feature(plugin_registrar, rustc_private)] - - extern crate rustc; - extern crate baz; - - use rustc::plugin::Registry; - - #[plugin_registrar] - pub fn foo(reg: &mut Registry) { - println!("{}", baz::baz()); - } - "#); - let baz = project("baz") - .file("Cargo.toml", r#" - [package] - name = "baz" - version = "0.0.1" - authors = [] - - [lib] - name = "baz" - crate_type = ["dylib"] - "#) - .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); - bar.build(); - baz.build(); - - let target = alternate(); - assert_that(foo.cargo_process("build").arg("--target").arg(&target), - execs().with_status(0)); -}); - -test!(cross_tests { - if disabled() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "bar" - "#) - .file("src/main.rs", &format!(r#" - extern crate foo; - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - #[test] fn test() {{ main() }} - "#, alternate_arch())) - .file("src/lib.rs", &format!(r#" - use std::env; - pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} - #[test] fn test_foo() {{ foo() }} - "#, alternate_arch())); - - let target = alternate(); - assert_that(p.cargo_process("test").arg("--target").arg(&target), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.0 ({foo}) -{running} target[..]{triple}[..]bar-[..] - -running 1 test -test test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]{triple}[..]foo-[..] - -running 1 test -test test_foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, foo = p.url(), triple = target, - doctest = DOCTEST))); -}); - -test!(simple_cargo_run { - if disabled() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - "#) - .file("src/main.rs", &format!(r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, alternate_arch())); - - let target = alternate(); - assert_that(p.cargo_process("run").arg("--target").arg(&target), - execs().with_status(0)); -}); - -test!(cross_with_a_build_script { - if disabled() { return } - - let target = alternate(); - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = 'build.rs' - "#) - .file("build.rs", &format!(r#" - use std::env; - use std::path::PathBuf; - fn main() {{ - assert_eq!(env::var("TARGET").unwrap(), "{0}"); - let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); - path.pop(); - assert!(path.file_name().unwrap().to_str().unwrap() - .starts_with("foo-")); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); - }} - "#, target)) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.0 (file://[..]) -{running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..]` -{running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` -{running} `rustc src[..]main.rs [..] --target {target} [..]` -", compiling = COMPILING, running = RUNNING, target = target, - dir = p.root().display()))); -}); - -test!(build_script_needed_for_host_and_target { - if disabled() { return } - - let target = alternate(); - let host = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = 'build.rs' - - [dependencies.d1] - path = "d1" - [build-dependencies.d2] - path = "d2" - "#) - - .file("build.rs", r#" - extern crate d2; - fn main() { d2::d2(); } - "#) - .file("src/main.rs", " - extern crate d1; - fn main() { d1::d1(); } - ") - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - build = 'build.rs' - "#) - .file("d1/src/lib.rs", " - pub fn d1() {} - ") - .file("d1/build.rs", r#" - use std::env; - fn main() { - let target = env::var("TARGET").unwrap(); - println!("cargo:rustc-flags=-L /path/to/{}", target); - } - "#) - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - - [dependencies.d1] - path = "../d1" - "#) - .file("d2/src/lib.rs", " - extern crate d1; - pub fn d2() { d1::d1(); } - "); - - assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} d1 v0.0.0 ({url}) -{running} `rustc d1[..]build.rs [..] --out-dir {dir}[..]target[..]build[..]d1-[..]` -{running} `{dir}[..]target[..]build[..]d1-[..]build-script-build` -{running} `{dir}[..]target[..]build[..]d1-[..]build-script-build` -{running} `rustc d1[..]src[..]lib.rs [..] 
--target {target} [..] \ - -L /path/to/{target}` -{running} `rustc d1[..]src[..]lib.rs [..] \ - -L /path/to/{host}` -{compiling} d2 v0.0.0 ({url}) -{running} `rustc d2[..]src[..]lib.rs [..] \ - -L /path/to/{host}` -{compiling} foo v0.0.0 ({url}) -{running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..] \ - -L /path/to/{host}` -{running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` -{running} `rustc src[..]main.rs [..] --target {target} [..] \ - -L /path/to/{target}` -", compiling = COMPILING, running = RUNNING, target = target, host = host, - url = p.url(), - dir = p.root().display()))); -}); - -test!(build_deps_for_the_right_arch { - if disabled() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.d2] - path = "d2" - "#) - .file("src/main.rs", "extern crate d2; fn main() {}") - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - "#) - .file("d1/src/lib.rs", " - pub fn d1() {} - ") - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - build = "build.rs" - - [build-dependencies.d1] - path = "../d1" - "#) - .file("d2/build.rs", "extern crate d1; fn main() {}") - .file("d2/src/lib.rs", ""); - - let target = alternate(); - assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), - execs().with_status(0)); -}); - -test!(build_script_only_host { - if disabled() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - - [build-dependencies.d1] - path = "d1" - "#) - .file("src/main.rs", "fn main() {}") - .file("build.rs", "extern crate d1; fn main() {}") - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - build = "build.rs" - "#) - .file("d1/src/lib.rs", " - pub fn d1() {} - ") - .file("d1/build.rs", r#" - use std::env; - - fn main() { - assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") - .contains("target/debug/build/d1-"), - "bad: {:?}", env::var("OUT_DIR")); - } - "#); - - let target = alternate(); - assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), - execs().with_status(0)); -}); - -test!(plugin_build_script_right_arch { - if disabled() { return } - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [lib] - name = "foo" - plugin = true - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(alternate()), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ([..]) -{running} `rustc build.rs [..]` -{running} `[..]build-script-build[..]` -{running} `rustc src[..]lib.rs [..]` -", compiling = COMPILING, running = RUNNING))); -}); - -test!(build_script_with_platform_specific_dependencies { - if disabled() { return } - - let target = alternate(); - let host = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [build-dependencies.d1] - path = "d1" - "#) - .file("build.rs", "extern crate d1; fn main() {}") - .file("src/lib.rs", "") - .file("d1/Cargo.toml", &format!(r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - - [target.{}.dependencies] - d2 = {{ path = "../d2" }} - "#, host)) - .file("d1/src/lib.rs", "extern 
crate d2;") - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - "#) - .file("d2/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} d2 v0.0.0 ([..]) -{running} `rustc d2[..]src[..]lib.rs [..]` -{compiling} d1 v0.0.0 ([..]) -{running} `rustc d1[..]src[..]lib.rs [..]` -{compiling} foo v0.0.1 ([..]) -{running} `rustc build.rs [..]` -{running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` -{running} `rustc src[..]lib.rs [..] --target {target} [..]` -", compiling = COMPILING, running = RUNNING, dir = p.root().display(), target = target))); -}); - -test!(platform_specific_dependencies_do_not_leak { - if disabled() { return } - - let target = alternate(); - let host = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [dependencies.d1] - path = "d1" - - [build-dependencies.d1] - path = "d1" - "#) - .file("build.rs", "extern crate d1; fn main() {}") - .file("src/lib.rs", "") - .file("d1/Cargo.toml", &format!(r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - - [target.{}.dependencies] - d2 = {{ path = "../d2" }} - "#, host)) - .file("d1/src/lib.rs", "extern crate d2;") - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - "#) - .file("d2/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), - execs().with_status(101) - .with_stderr("\ -[..] error: can't find crate for `d2` -[..] extern crate d2; -[..] -error: aborting due to previous error -Could not compile `d1`. - -Caused by: - [..] -")); -}); - -test!(platform_specific_variables_reflected_in_build_scripts { - if disabled() { return } - - let target = alternate(); - let host = ::rustc_host(); - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [target.{host}.dependencies] - d1 = {{ path = "d1" }} - - [target.{target}.dependencies] - d2 = {{ path = "d2" }} - "#, host = host, target = target)) - .file("build.rs", &format!(r#" - use std::env; - - fn main() {{ - let platform = env::var("TARGET").unwrap(); - let (expected, not_expected) = match &platform[..] 
{{ - "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), - "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), - _ => panic!("unknown platform") - }}; - - env::var(expected).unwrap(); - env::var(not_expected).unwrap_err(); - }} - "#, host = host, target = target)) - .file("src/lib.rs", "") - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - links = "d1" - build = "build.rs" - "#) - .file("d1/build.rs", r#" - fn main() { println!("cargo:val=1") } - "#) - .file("d1/src/lib.rs", "") - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - links = "d2" - build = "build.rs" - "#) - .file("d2/build.rs", r#" - fn main() { println!("cargo:val=1") } - "#) - .file("d2/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); - assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), - execs().with_status(0)); -}); \ No newline at end of file diff --git a/tests/test_cargo_doc.rs b/tests/test_cargo_doc.rs deleted file mode 100644 index 7464e3774b9..00000000000 --- a/tests/test_cargo_doc.rs +++ /dev/null @@ -1,291 +0,0 @@ -use support::{project, execs, path2url}; -use support::COMPILING; -use hamcrest::{assert_that, existing_file, existing_dir, is_not}; - -fn setup() { -} - -test!(simple { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", r#" - pub fn foo() {} - "#); - - assert_that(p.cargo_process("doc"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -", - compiling = COMPILING, - dir = path2url(p.root())))); - assert_that(&p.root().join("target/doc"), existing_dir()); - assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); -}); - -test!(doc_no_libs { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "foo" - doc = false - "#) - .file("src/main.rs", r#" - bad code - "#); - - assert_that(p.cargo_process("doc"), - execs().with_status(0)); -}); - -test!(doc_twice { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - pub fn foo() {} - "#); - - assert_that(p.cargo_process("doc"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -", - compiling = COMPILING, - dir = path2url(p.root())))); - - assert_that(p.cargo("doc"), - execs().with_status(0).with_stdout("")) -}); - -test!(doc_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#) - .file("src/lib.rs", r#" - extern crate bar; - pub fn foo() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", r#" - pub fn bar() {} - "#); - - assert_that(p.cargo_process("doc"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} bar v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", - compiling = COMPILING, - dir = path2url(p.root())))); - - assert_that(&p.root().join("target/doc"), existing_dir()); - assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); - assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); - - assert_that(p.cargo("doc") - .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), - 
execs().with_status(0).with_stdout("")); - - assert_that(&p.root().join("target/doc"), existing_dir()); - assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); - assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); -}); - -test!(doc_no_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#) - .file("src/lib.rs", r#" - extern crate bar; - pub fn foo() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", r#" - pub fn bar() {} - "#); - - assert_that(p.cargo_process("doc").arg("--no-deps"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} bar v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", - compiling = COMPILING, - dir = path2url(p.root())))); - - assert_that(&p.root().join("target/doc"), existing_dir()); - assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); - assert_that(&p.root().join("target/doc/bar/index.html"), is_not(existing_file())); -}); - -test!(doc_only_bin { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", r#" - extern crate bar; - pub fn foo() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", r#" - pub fn bar() {} - "#); - - assert_that(p.cargo_process("doc").arg("-v"), - execs().with_status(0)); - - assert_that(&p.root().join("target/doc"), existing_dir()); - assert_that(&p.root().join("target/doc/bar/index.html"), existing_file()); - assert_that(&p.root().join("target/doc/foo/index.html"), existing_file()); -}); - -test!(doc_lib_bin_same_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", "fn foo() {}"); - - assert_that(p.cargo_process("doc"), - execs().with_status(101) - .with_stderr("\ -Cannot document a package where a library and a binary have the same name. 
\ -Consider renaming one or marking the target as `doc = false` -")); -}); - -test!(doc_dash_p { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "extern crate a;") - .file("a/Cargo.toml", r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies.b] - path = "../b" - "#) - .file("a/src/lib.rs", "extern crate b;") - .file("b/Cargo.toml", r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - "#) - .file("b/src/lib.rs", ""); - - assert_that(p.cargo_process("doc").arg("-p").arg("a"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} b v0.0.1 (file://[..]) -{compiling} a v0.0.1 (file://[..]) -", compiling = COMPILING))); -}); - -test!(doc_same_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("src/bin/main.rs", "fn main() {}") - .file("examples/main.rs", "fn main() {}") - .file("tests/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("doc"), - execs().with_status(0)); -}); - -test!(doc_target { - const TARGET: &'static str = "arm-unknown-linux-gnueabihf"; - - if !::is_nightly() { return } - - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - #![feature(no_core)] - #![no_core] - - extern { - pub static A: u32; - } - "#); - - assert_that(p.cargo_process("doc").arg("--target").arg(TARGET).arg("--verbose"), - execs().with_status(0)); - assert_that(&p.root().join(&format!("target/{}/doc", TARGET)), existing_dir()); - assert_that(&p.root().join(&format!("target/{}/doc/foo/index.html", TARGET)), existing_file()); -}); diff --git a/tests/test_cargo_features.rs b/tests/test_cargo_features.rs deleted file mode 100644 index 11211087b0b..00000000000 --- a/tests/test_cargo_features.rs +++ /dev/null @@ -1,809 +0,0 @@ -use std::fs::File; -use std::io::prelude::*; - -use support::{project, execs}; -use support::{COMPILING, FRESH}; -use support::paths::CargoPathExt; -use hamcrest::assert_that; - -fn setup() { -} - -test!(invalid1 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - bar = ["baz"] - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - Feature `bar` includes `baz` which is neither a dependency nor another feature -"))); -}); - -test!(invalid2 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - bar = ["baz"] - - [dependencies.bar] - path = "foo" - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - Features and dependencies cannot have the same name: `bar` -"))); -}); - -test!(invalid3 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - bar = ["baz"] - - [dependencies.baz] - path = "foo" - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - Feature `bar` depends on `baz` which is not an optional dependency. 
-Consider adding `optional = true` to the dependency -"))); -}); - -test!(invalid4 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - features = ["bar"] - "#) - .file("src/main.rs", "") - .file("bar/Cargo.toml", r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr(&format!("\ -Package `bar v0.0.1 ([..])` does not have these features: `bar` -"))); - - let p = p.file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#); - - assert_that(p.cargo_process("build").arg("--features").arg("test"), - execs().with_status(101).with_stderr(&format!("\ -Package `foo v0.0.1 ([..])` does not have these features: `test` -"))); -}); - -test!(invalid5 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies.bar] - path = "bar" - optional = true - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - Dev-dependencies are not allowed to be optional: `bar` -"))); -}); - -test!(invalid6 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar/baz"] - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build").arg("--features").arg("foo"), - execs().with_status(101).with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - Feature `foo` requires `bar` which is not an optional dependency -"))); -}); - -test!(invalid7 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar/baz"] - bar = [] - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build").arg("--features").arg("foo"), - execs().with_status(101).with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - Feature `foo` requires `bar` which is not an optional dependency -"))); -}); - -test!(invalid8 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - features = ["foo/bar"] - "#) - .file("src/main.rs", "") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", ""); - - assert_that(p.cargo_process("build").arg("--features").arg("foo"), - execs().with_status(101).with_stderr(&format!("\ -features in dependencies cannot enable features in other dependencies: `foo/bar` -"))); -}); - -test!(no_feature_doesnt_build { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - optional = true - "#) - .file("src/main.rs", r#" - #[cfg(feature = "bar")] - extern crate bar; - #[cfg(feature = "bar")] - fn main() { bar::bar(); println!("bar") } - #[cfg(not(feature = "bar"))] - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", "pub fn bar() {}"); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = p.url()))); - 
assert_that(p.process(&p.bin("foo")), - execs().with_status(0).with_stdout("")); - - assert_that(p.cargo("build").arg("--features").arg("bar"), - execs().with_status(0).with_stdout(format!("\ -{compiling} bar v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = p.url()))); - assert_that(p.process(&p.bin("foo")), - execs().with_status(0).with_stdout("bar\n")); -}); - -test!(default_feature_pulled_in { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["bar"] - - [dependencies.bar] - path = "bar" - optional = true - "#) - .file("src/main.rs", r#" - #[cfg(feature = "bar")] - extern crate bar; - #[cfg(feature = "bar")] - fn main() { bar::bar(); println!("bar") } - #[cfg(not(feature = "bar"))] - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", "pub fn bar() {}"); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} bar v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = p.url()))); - assert_that(p.process(&p.bin("foo")), - execs().with_status(0).with_stdout("bar\n")); - - assert_that(p.cargo("build").arg("--no-default-features"), - execs().with_status(0).with_stdout(format!("\ -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = p.url()))); - assert_that(p.process(&p.bin("foo")), - execs().with_status(0).with_stdout("")); -}); - -test!(cyclic_feature { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["default"] - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr("\ -Cyclic feature dependency: feature `default` depends on itself -")); -}); - -test!(cyclic_feature2 { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar"] - bar = ["foo"] - "#) - .file("src/main.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr("\ -Cyclic feature dependency: feature `[..]` depends on itself -")); -}); - -test!(groups_on_groups_on_groups { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["f1"] - f1 = ["f2", "bar"] - f2 = ["f3", "f4"] - f3 = ["f5", "f6", "baz"] - f4 = ["f5", "f7"] - f5 = ["f6"] - f6 = ["f7"] - f7 = ["bar"] - - [dependencies.bar] - path = "bar" - optional = true - - [dependencies.baz] - path = "baz" - optional = true - "#) - .file("src/main.rs", r#" - extern crate bar; - extern crate baz; - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", r#" - [package] - name = "baz" - version = "0.0.1" - authors = [] - "#) - .file("baz/src/lib.rs", "pub fn baz() {}"); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} ba[..] v0.0.1 ({dir}) -{compiling} ba[..] 
v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = p.url()))); -}); - -test!(many_cli_features { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - optional = true - - [dependencies.baz] - path = "baz" - optional = true - "#) - .file("src/main.rs", r#" - extern crate bar; - extern crate baz; - fn main() {} - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", r#" - [package] - name = "baz" - version = "0.0.1" - authors = [] - "#) - .file("baz/src/lib.rs", "pub fn baz() {}"); - - assert_that(p.cargo_process("build").arg("--features").arg("bar baz"), - execs().with_status(0).with_stdout(format!("\ -{compiling} ba[..] v0.0.1 ({dir}) -{compiling} ba[..] v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = p.url()))); -}); - -test!(union_features { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - features = ["f1"] - [dependencies.d2] - path = "d2" - features = ["f2"] - "#) - .file("src/main.rs", r#" - extern crate d1; - extern crate d2; - fn main() { - d2::f1(); - d2::f2(); - } - "#) - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [features] - f1 = ["d2"] - - [dependencies.d2] - path = "../d2" - features = ["f1"] - optional = true - "#) - .file("d1/src/lib.rs", "") - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.1" - authors = [] - - [features] - f1 = [] - f2 = [] - "#) - .file("d2/src/lib.rs", r#" - #[cfg(feature = "f1")] pub fn f1() {} - #[cfg(feature = "f2")] pub fn f2() {} - "#); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} d2 v0.0.1 ({dir}) -{compiling} d1 v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = p.url()))); -}); - -test!(many_features_no_rebuilds { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "b" - version = "0.1.0" - authors = [] - - [dependencies.a] - path = "a" - features = ["fall"] - "#) - .file("src/main.rs", "fn main() {}") - .file("a/Cargo.toml", r#" - [package] - name = "a" - version = "0.1.0" - authors = [] - - [features] - ftest = [] - ftest2 = [] - fall = ["ftest", "ftest2"] - "#) - .file("a/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} a v0.1.0 ({dir}) -{compiling} b v0.1.0 ({dir}) -", compiling = COMPILING, dir = p.url()))); - p.root().move_into_the_past().unwrap(); - - assert_that(p.cargo("build").arg("-v"), - execs().with_status(0).with_stdout(format!("\ -{fresh} a v0.1.0 ([..]) -{fresh} b v0.1.0 ([..]) -", fresh = FRESH))); -}); - -// Tests that all cmd lines work with `--features ""` -test!(empty_features { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("build").arg("--features").arg(""), - execs().with_status(0)); -}); - -// Tests that all cmd lines work with `--features ""` -test!(transitive_features { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar/baz"] - - [dependencies.bar] - path = "bar" - "#) 
- .file("src/main.rs", " - extern crate bar; - fn main() { bar::baz(); } - ") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - baz = [] - "#) - .file("bar/src/lib.rs", r#" - #[cfg(feature = "baz")] - pub fn baz() {} - "#); - - assert_that(p.cargo_process("build").arg("--features").arg("foo"), - execs().with_status(0)); -}); - -test!(everything_in_the_lockfile { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - f1 = ["d1/f1"] - f2 = ["d2"] - - [dependencies.d1] - path = "d1" - [dependencies.d2] - path = "d2" - optional = true - [dependencies.d3] - path = "d3" - optional = true - "#) - .file("src/main.rs", "fn main() {}") - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [features] - f1 = [] - "#) - .file("d1/src/lib.rs", "") - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.2" - authors = [] - "#) - .file("d2/src/lib.rs", "") - .file("d3/Cargo.toml", r#" - [package] - name = "d3" - version = "0.0.3" - authors = [] - - [features] - f3 = [] - "#) - .file("d3/src/lib.rs", ""); - - assert_that(p.cargo_process("fetch"), execs().with_status(0)); - let loc = p.root().join("Cargo.lock"); - let mut lockfile = String::new(); - File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap(); - assert!(lockfile.contains(r#"name = "d1""#), "d1 not found\n{}", lockfile); - assert!(lockfile.contains(r#"name = "d2""#), "d2 not found\n{}", lockfile); - assert!(lockfile.contains(r#"name = "d3""#), "d3 not found\n{}", lockfile); -}); - -test!(no_rebuild_when_frobbing_default_feature { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "a" } - b = { path = "b" } - "#) - .file("src/lib.rs", "") - .file("b/Cargo.toml", r#" - [package] - name = "b" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "../a", features = ["f1"], default-features = false } - "#) - .file("b/src/lib.rs", "") - .file("a/Cargo.toml", r#" - [package] - name = "a" - version = "0.1.0" - authors = [] - - [features] - default = ["f1"] - f1 = [] - "#) - .file("a/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); - assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); -}); - -test!(unions_work_with_no_default_features { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "a" } - b = { path = "b" } - "#) - .file("src/lib.rs", r#" - extern crate a; - pub fn foo() { a::a(); } - "#) - .file("b/Cargo.toml", r#" - [package] - name = "b" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "../a", features = [], default-features = false } - "#) - .file("b/src/lib.rs", "") - .file("a/Cargo.toml", r#" - [package] - name = "a" - version = "0.1.0" - authors = [] - - [features] - default = ["f1"] - f1 = [] - "#) - .file("a/src/lib.rs", r#" - #[cfg(feature = "f1")] - pub fn a() {} - "#); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); - assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); -}); - -test!(optional_and_dev_dep { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "test" - 
version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = "foo", optional = true } - [dev-dependencies] - foo = { path = "foo" } - "#) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - "#) - .file("foo/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} test v0.1.0 ([..]) -", compiling = COMPILING))); -}); - -test!(activating_feature_activates_dep { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = "foo", optional = true } - - [features] - a = ["foo/a"] - "#) - .file("src/lib.rs", " - extern crate foo; - pub fn bar() { - foo::bar(); - } - ") - .file("foo/Cargo.toml", r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [features] - a = [] - "#) - .file("foo/src/lib.rs", r#" - #[cfg(feature = "a")] - pub fn bar() {} - "#); - - assert_that(p.cargo_process("build").arg("--features").arg("a").arg("-v"), - execs().with_status(0)); -}); diff --git a/tests/test_cargo_fetch.rs b/tests/test_cargo_fetch.rs deleted file mode 100644 index b0bf231df30..00000000000 --- a/tests/test_cargo_fetch.rs +++ /dev/null @@ -1,21 +0,0 @@ -use support::{project, execs}; -use hamcrest::assert_that; - -fn setup() {} - -test!(no_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#) - .file("src/main.rs", r#" - mod a; fn main() {} - "#) - .file("src/a.rs", ""); - - assert_that(p.cargo_process("fetch"), - execs().with_status(0).with_stdout("")); -}); diff --git a/tests/test_cargo_freshness.rs b/tests/test_cargo_freshness.rs deleted file mode 100644 index 1a79f14c9a0..00000000000 --- a/tests/test_cargo_freshness.rs +++ /dev/null @@ -1,205 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use std::thread; - -use support::{project, execs, path2url}; -use support::COMPILING; -use support::paths::CargoPathExt; -use hamcrest::{assert_that, existing_file}; - -fn setup() {} - -test!(modifying_and_moving { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#) - .file("src/main.rs", r#" - mod a; fn main() {} - "#) - .file("src/a.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = path2url(p.root())))); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout("")); - p.root().move_into_the_past().unwrap(); - p.root().join("target").move_into_the_past().unwrap(); - - File::create(&p.root().join("src/a.rs")).unwrap() - .write_all(b"fn main() {}").unwrap(); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = path2url(p.root())))); - - fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); - assert_that(p.cargo("build"), - execs().with_status(101)); -}); - -test!(modify_only_some_files { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#) - .file("src/lib.rs", "mod a;") - .file("src/a.rs", "") - .file("src/main.rs", r#" - mod b; - fn main() {} - "#) - .file("src/b.rs", "") - .file("tests/test.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(format!("\ -{compiling} foo v0.0.1 ({dir}) 
-", compiling = COMPILING, dir = path2url(p.root())))); - assert_that(p.cargo("test"), - execs().with_status(0)); - thread::sleep_ms(1000); - - assert_that(&p.bin("foo"), existing_file()); - - let lib = p.root().join("src/lib.rs"); - let bin = p.root().join("src/b.rs"); - - File::create(&lib).unwrap().write_all(b"invalid rust code").unwrap(); - File::create(&bin).unwrap().write_all(b"fn foo() {}").unwrap(); - lib.move_into_the_past().unwrap(); - - // Make sure the binary is rebuilt, not the lib - assert_that(p.cargo("build") - .env("RUST_LOG", "cargo::ops::cargo_rustc::fingerprint"), - execs().with_status(0).with_stdout(format!("\ -{compiling} foo v0.0.1 ({dir}) -", compiling = COMPILING, dir = path2url(p.root())))); - assert_that(&p.bin("foo"), existing_file()); -}); - -test!(rebuild_sub_package_then_while_package { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [dependencies.a] - path = "a" - [dependencies.b] - path = "b" - "#) - .file("src/lib.rs", "extern crate a; extern crate b;") - .file("a/Cargo.toml", r#" - [package] - name = "a" - authors = [] - version = "0.0.1" - [dependencies.b] - path = "../b" - "#) - .file("a/src/lib.rs", "extern crate b;") - .file("b/Cargo.toml", r#" - [package] - name = "b" - authors = [] - version = "0.0.1" - "#) - .file("b/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0)); - - File::create(&p.root().join("b/src/lib.rs")).unwrap().write_all(br#" - pub fn b() {} - "#).unwrap(); - - assert_that(p.cargo("build").arg("-pb"), - execs().with_status(0)); - - File::create(&p.root().join("src/lib.rs")).unwrap().write_all(br#" - extern crate a; - extern crate b; - pub fn toplevel() {} - "#).unwrap(); - - assert_that(p.cargo("build"), - execs().with_status(0)); -}); - -test!(changing_features_is_ok { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [features] - foo = [] - "#) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0) - .with_stdout("\ -[..]Compiling foo v0.0.1 ([..]) -")); - - assert_that(p.cargo("build").arg("--features").arg("foo"), - execs().with_status(0) - .with_stdout("\ -[..]Compiling foo v0.0.1 ([..]) -")); - - assert_that(p.cargo("build"), - execs().with_status(0) - .with_stdout("\ -[..]Compiling foo v0.0.1 ([..]) -")); - - assert_that(p.cargo("build"), - execs().with_status(0) - .with_stdout("")); -}); - -test!(rebuild_tests_if_lib_changes { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "pub fn foo() {}") - .file("tests/foo.rs", r#" - extern crate foo; - #[test] - fn test() { foo::foo(); } - "#); - - assert_that(p.cargo_process("build"), - execs().with_status(0)); - assert_that(p.cargo("test"), - execs().with_status(0)); - - File::create(&p.root().join("src/lib.rs")).unwrap(); - p.root().move_into_the_past().unwrap(); - p.root().join("target").move_into_the_past().unwrap(); - - assert_that(p.cargo("build"), - execs().with_status(0)); - assert_that(p.cargo("test").arg("-v"), - execs().with_status(101)); -}); diff --git a/tests/test_cargo_generate_lockfile.rs b/tests/test_cargo_generate_lockfile.rs deleted file mode 100644 index ddfa48ea73f..00000000000 --- a/tests/test_cargo_generate_lockfile.rs +++ /dev/null @@ -1,149 +0,0 @@ -use std::fs::File; -use std::io::prelude::*; - -use support::{project, execs}; -use hamcrest::assert_that; - -fn 
setup() {} - -test!(ignores_carriage_return { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#) - .file("src/main.rs", r#" - mod a; fn main() {} - "#) - .file("src/a.rs", ""); - - assert_that(p.cargo_process("build"), - execs().with_status(0)); - - let lockfile = p.root().join("Cargo.lock"); - let mut lock = String::new(); - File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); - let lock = lock.replace("\n", "\r\n"); - File::create(&lockfile).unwrap().write_all(lock.as_bytes()).unwrap(); - assert_that(p.cargo("build"), - execs().with_status(0)); -}); - -test!(adding_and_removing_packages { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - authors = [] - version = "0.0.1" - "#) - .file("bar/src/lib.rs", ""); - - assert_that(p.cargo_process("generate-lockfile"), - execs().with_status(0)); - - let lockfile = p.root().join("Cargo.lock"); - let toml = p.root().join("Cargo.toml"); - let mut lock1 = String::new(); - File::open(&lockfile).unwrap().read_to_string(&mut lock1).unwrap(); - - // add a dep - File::create(&toml).unwrap().write_all(br#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [dependencies.bar] - path = "bar" - "#).unwrap(); - assert_that(p.cargo("generate-lockfile"), - execs().with_status(0)); - let mut lock2 = String::new(); - File::open(&lockfile).unwrap().read_to_string(&mut lock2).unwrap(); - assert!(lock1 != lock2); - - // change the dep - File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#" - [package] - name = "bar" - authors = [] - version = "0.0.2" - "#).unwrap(); - assert_that(p.cargo("generate-lockfile"), - execs().with_status(0)); - let mut lock3 = String::new(); - File::open(&lockfile).unwrap().read_to_string(&mut lock3).unwrap(); - assert!(lock1 != lock3); - assert!(lock2 != lock3); - - // remove the dep - File::create(&toml).unwrap().write_all(br#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#).unwrap(); - assert_that(p.cargo("generate-lockfile"), - execs().with_status(0)); - let mut lock4 = String::new(); - File::open(&lockfile).unwrap().read_to_string(&mut lock4).unwrap(); - assert_eq!(lock1, lock4); -}); - -test!(preserve_metadata { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - authors = [] - version = "0.0.1" - "#) - .file("bar/src/lib.rs", ""); - - assert_that(p.cargo_process("generate-lockfile"), - execs().with_status(0)); - - let metadata = r#" -[metadata] -bar = "baz" -foo = "bar" -"#; - let lockfile = p.root().join("Cargo.lock"); - { - let mut lock = String::new(); - File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); - let data = lock + metadata; - File::create(&lockfile).unwrap().write_all(data.as_bytes()).unwrap(); - } - - // Build and make sure the metadata is still there - assert_that(p.cargo("build"), - execs().with_status(0)); - let mut lock = String::new(); - File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); - assert!(lock.contains(metadata.trim()), "{}", lock); - - // Update and make sure the metadata is still there - assert_that(p.cargo("update"), - execs().with_status(0)); - let mut lock = String::new(); - 
File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap(); - assert!(lock.contains(metadata.trim()), "{}", lock); -}); diff --git a/tests/test_cargo_new.rs b/tests/test_cargo_new.rs deleted file mode 100644 index 981b75a3218..00000000000 --- a/tests/test_cargo_new.rs +++ /dev/null @@ -1,270 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use std::env; -use tempdir::TempDir; - -use support::{execs, paths, cargo_dir}; -use support::paths::CargoPathExt; -use hamcrest::{assert_that, existing_file, existing_dir, is_not}; - -use cargo::util::{process, ProcessBuilder}; - -fn setup() { -} - -fn my_process(s: &str) -> ProcessBuilder { - let mut p = process(s).unwrap(); - p.cwd(&paths::root()).env("HOME", &paths::home()); - return p; -} - -fn cargo_process(s: &str) -> ProcessBuilder { - let mut p = process(&cargo_dir().join("cargo")).unwrap(); - p.arg(s).cwd(&paths::root()).env("HOME", &paths::home()); - return p; -} - -test!(simple_lib { - assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("none") - .env("USER", "foo"), - execs().with_status(0)); - - assert_that(&paths::root().join("foo"), existing_dir()); - assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); - assert_that(&paths::root().join("foo/src/lib.rs"), existing_file()); - assert_that(&paths::root().join("foo/.gitignore"), is_not(existing_file())); - - assert_that(cargo_process("build").cwd(&paths::root().join("foo")), - execs().with_status(0)); -}); - -test!(simple_bin { - assert_that(cargo_process("new").arg("foo").arg("--bin") - .env("USER", "foo"), - execs().with_status(0)); - - assert_that(&paths::root().join("foo"), existing_dir()); - assert_that(&paths::root().join("foo/Cargo.toml"), existing_file()); - assert_that(&paths::root().join("foo/src/main.rs"), existing_file()); - - assert_that(cargo_process("build").cwd(&paths::root().join("foo")), - execs().with_status(0)); - assert_that(&paths::root().join(&format!("foo/target/debug/foo{}", - env::consts::EXE_SUFFIX)), - existing_file()); -}); - -test!(simple_git { - let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("foo").cwd(td.path().clone()) - .env("USER", "foo"), - execs().with_status(0)); - - assert_that(td.path(), existing_dir()); - assert_that(&td.path().join("foo/Cargo.toml"), existing_file()); - assert_that(&td.path().join("foo/src/lib.rs"), existing_file()); - assert_that(&td.path().join("foo/.git"), existing_dir()); - assert_that(&td.path().join("foo/.gitignore"), existing_file()); - - assert_that(cargo_process("build").cwd(&td.path().clone().join("foo")), - execs().with_status(0)); -}); - -test!(no_argument { - assert_that(cargo_process("new"), - execs().with_status(1) - .with_stderr("\ -Invalid arguments. 
- -Usage: - cargo new [options] <path> - cargo new -h | --help -")); -}); - -test!(existing { - let dst = paths::root().join("foo"); - fs::create_dir(&dst).unwrap(); - assert_that(cargo_process("new").arg("foo"), - execs().with_status(101) - .with_stderr(format!("Destination `{}` already exists\n", - dst.display()))); -}); - -test!(invalid_characters { - assert_that(cargo_process("new").arg("foo.rs"), - execs().with_status(101) - .with_stderr("Invalid character `.` in crate name: `foo.rs`")); -}); - -test!(rust_prefix_stripped { - assert_that(cargo_process("new").arg("rust-foo").env("USER", "foo"), - execs().with_status(0) - .with_stdout("note: package will be named `foo`; use --name to override")); - let toml = paths::root().join("rust-foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"name = "foo""#)); -}); - -test!(bin_disables_stripping { - assert_that(cargo_process("new").arg("rust-foo").arg("--bin").env("USER", "foo"), - execs().with_status(0)); - let toml = paths::root().join("rust-foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"name = "rust-foo""#)); -}); - -test!(explicit_name_not_stripped { - assert_that(cargo_process("new").arg("foo").arg("--name").arg("rust-bar").env("USER", "foo"), - execs().with_status(0)); - let toml = paths::root().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"name = "rust-bar""#)); -}); - -test!(finds_author_user { - // Use a temp dir to make sure we don't pick up .cargo/config somewhere in - // the hierarchy - let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("foo").env("USER", "foo") - .cwd(td.path().clone()), - execs().with_status(0)); - - let toml = td.path().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"authors = ["foo"]"#)); -}); - -test!(finds_author_user_escaped { - // Use a temp dir to make sure we don't pick up .cargo/config somewhere in - // the hierarchy - let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("foo").env("USER", "foo \"bar\"") - .cwd(td.path().clone()), - execs().with_status(0)); - - let toml = td.path().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"authors = ["foo \"bar\""]"#)); -}); - -test!(finds_author_username { - // Use a temp dir to make sure we don't pick up .cargo/config somewhere in - // the hierarchy - let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("foo") - .env_remove("USER") - .env("USERNAME", "foo") - .cwd(td.path().clone()), - execs().with_status(0)); - - let toml = td.path().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"authors = ["foo"]"#)); -}); - -test!(finds_author_email { - // Use a temp dir to make sure we don't pick up .cargo/config somewhere in - // the hierarchy - let td = TempDir::new("cargo").unwrap(); - assert_that(cargo_process("new").arg("foo") - .env("USER", "bar") - .env("EMAIL", "baz") - .cwd(td.path().clone()), - execs().with_status(0)); - - let toml =
td.path().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"authors = ["bar <baz>"]"#)); -}); - -test!(finds_author_git { - my_process("git").args(&["config", "--global", "user.name", "bar"]) - .exec().unwrap(); - my_process("git").args(&["config", "--global", "user.email", "baz"]) - .exec().unwrap(); - assert_that(cargo_process("new").arg("foo").env("USER", "foo"), - execs().with_status(0)); - - let toml = paths::root().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"authors = ["bar <baz>"]"#)); -}); - -test!(author_prefers_cargo { - my_process("git").args(&["config", "--global", "user.name", "foo"]) - .exec().unwrap(); - my_process("git").args(&["config", "--global", "user.email", "bar"]) - .exec().unwrap(); - let root = paths::root(); - fs::create_dir(&root.join(".cargo")).unwrap(); - File::create(&root.join(".cargo/config")).unwrap().write_all(br#" - [cargo-new] - name = "new-foo" - email = "new-bar" - git = false - "#).unwrap(); - - assert_that(cargo_process("new").arg("foo").env("USER", "foo"), - execs().with_status(0)); - - let toml = paths::root().join("foo/Cargo.toml"); - let mut contents = String::new(); - File::open(&toml).unwrap().read_to_string(&mut contents).unwrap(); - assert!(contents.contains(r#"authors = ["new-foo <new-bar>"]"#)); - assert!(!root.join("foo/.gitignore").c_exists()); -}); - -test!(git_prefers_command_line { - let root = paths::root(); - let td = TempDir::new("cargo").unwrap(); - fs::create_dir(&root.join(".cargo")).unwrap(); - File::create(&root.join(".cargo/config")).unwrap().write_all(br#" - [cargo-new] - vcs = "none" - name = "foo" - email = "bar" - "#).unwrap(); - - assert_that(cargo_process("new").arg("foo").arg("--vcs").arg("git") - .cwd(td.path()) - .env("USER", "foo"), - execs().with_status(0)); - assert!(td.path().join("foo/.gitignore").c_exists()); -}); - -test!(subpackage_no_git { - assert_that(cargo_process("new").arg("foo").env("USER", "foo"), - execs().with_status(0)); - - let subpackage = paths::root().join("foo").join("components"); - fs::create_dir(&subpackage).unwrap(); - assert_that(cargo_process("new").arg("foo/components/subcomponent") - .env("USER", "foo"), - execs().with_status(0)); - - assert_that(&paths::root().join("foo/components/subcomponent/.git"), - is_not(existing_file())); - assert_that(&paths::root().join("foo/components/subcomponent/.gitignore"), - is_not(existing_file())); -}); - -test!(unknown_flags { - assert_that(cargo_process("new").arg("foo").arg("--flag"), - execs().with_status(1) - .with_stderr("\ -Unknown flag: '--flag' - -Usage: - cargo new [..] - cargo new [..]
-")); -}); diff --git a/tests/test_cargo_package.rs b/tests/test_cargo_package.rs deleted file mode 100644 index 5ca6b1cf632..00000000000 --- a/tests/test_cargo_package.rs +++ /dev/null @@ -1,373 +0,0 @@ -use std::fs::File; -use std::io::Cursor; -use std::io::prelude::*; -use std::path::Path; - -use cargo::util::process; -use flate2::read::GzDecoder; -use git2; -use tar::Archive; - -use support::{project, execs, cargo_dir, paths, git, path2url}; -use support::{PACKAGING, VERIFYING, COMPILING, ARCHIVING}; -use hamcrest::{assert_that, existing_file}; - -fn setup() { -} - -test!(simple { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = ["*.txt"] - license = "MIT" - description = "foo" - "#) - .file("src/main.rs", r#" - fn main() { println!("hello"); } - "#) - .file("src/bar.txt", ""); // should be ignored when packaging - - assert_that(p.cargo_process("package"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ({dir}) -{verifying} foo v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}[..]) -", - packaging = PACKAGING, - verifying = VERIFYING, - compiling = COMPILING, - dir = p.url()))); - assert_that(&p.root().join("target/package/foo-0.0.1.crate"), existing_file()); - assert_that(p.cargo("package").arg("-l"), - execs().with_status(0).with_stdout("\ -Cargo.toml -src[..]main.rs -")); - assert_that(p.cargo("package"), - execs().with_status(0).with_stdout("")); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - let mut rdr = GzDecoder::new(f).unwrap(); - let mut contents = Vec::new(); - rdr.read_to_end(&mut contents).unwrap(); - let ar = Archive::new(Cursor::new(contents)); - for f in ar.files().unwrap() { - let f = f.unwrap(); - let fname = f.header().path_bytes(); - let fname = &*fname; - assert!(fname == b"foo-0.0.1/Cargo.toml" || - fname == b"foo-0.0.1/src/main.rs", - "unexpected filename: {:?}", f.header().path()) - } -}); - -test!(metadata_warning { - let p = project("all") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#); - assert_that(p.cargo_process("package"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ({dir}) -{verifying} foo v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}[..]) -", - packaging = PACKAGING, - verifying = VERIFYING, - compiling = COMPILING, - dir = p.url())) - .with_stderr("\ -warning: manifest has no description, license, license-file, documentation, \ -homepage or repository. See \ -http://doc.crates.io/manifest.html#package-metadata for more info.")); - - let p = project("one") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - "#) - .file("src/main.rs", r#" - fn main() {} - "#); - assert_that(p.cargo_process("package"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ({dir}) -{verifying} foo v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}[..]) -", - packaging = PACKAGING, - verifying = VERIFYING, - compiling = COMPILING, - dir = p.url())) - .with_stderr("\ -warning: manifest has no description, documentation, homepage or repository. 
See \ -http://doc.crates.io/manifest.html#package-metadata for more info.")); - - let p = project("all") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - repository = "bar" - "#)) - .file("src/main.rs", r#" - fn main() {} - "#); - assert_that(p.cargo_process("package"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ({dir}) -{verifying} foo v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}[..]) -", - packaging = PACKAGING, - verifying = VERIFYING, - compiling = COMPILING, - dir = p.url()))); -}); - -test!(package_verbose { - let root = paths::root().join("all"); - let p = git::repo(&root) - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.0.1" - authors = [] - "#) - .file("a/src/lib.rs", ""); - p.build(); - let mut cargo = process(&cargo_dir().join("cargo")).unwrap(); - cargo.cwd(&root).env("HOME", &paths::home()); - assert_that(cargo.clone().arg("build"), execs().with_status(0)); - assert_that(cargo.arg("package").arg("-v").arg("--no-verify"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ([..]) -{archiving} [..] -{archiving} [..] -", - packaging = PACKAGING, - archiving = ARCHIVING))); -}); - -test!(package_verification { - let p = project("all") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#); - assert_that(p.cargo_process("build"), - execs().with_status(0)); - assert_that(p.cargo("package"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ({dir}) -{verifying} foo v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}[..]) -", - packaging = PACKAGING, - verifying = VERIFYING, - compiling = COMPILING, - dir = p.url()))); -}); - -test!(exclude { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = ["*.txt"] - "#) - .file("src/main.rs", r#" - fn main() { println!("hello"); } - "#) - .file("bar.txt", "") - .file("src/bar.txt", ""); - - assert_that(p.cargo_process("package").arg("--no-verify").arg("-v"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ([..]) -{archiving} [..] -{archiving} [..] -", packaging = PACKAGING, archiving = ARCHIVING))); -}); - -test!(include { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = ["*.txt"] - include = ["foo.txt", "**/*.rs", "Cargo.toml"] - "#) - .file("foo.txt", "") - .file("src/main.rs", r#" - fn main() { println!("hello"); } - "#) - .file("src/bar.txt", ""); // should be ignored when packaging - - assert_that(p.cargo_process("package").arg("--no-verify").arg("-v"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ([..]) -{archiving} [..] -{archiving} [..] -{archiving} [..] 
-", packaging = PACKAGING, archiving = ARCHIVING))); -}); - -test!(package_lib_with_bin { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - extern crate foo; - fn main() {} - "#) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("package").arg("-v"), - execs().with_status(0)); -}); - -test!(package_new_git_repo { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - git2::Repository::init(&p.root()).unwrap(); - - assert_that(p.process(cargo_dir().join("cargo")).arg("package") - .arg("--no-verify").arg("-v"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} foo v0.0.1 ([..]) -{archiving} [..] -{archiving} [..] -", packaging = PACKAGING, archiving = ARCHIVING))); -}); - -test!(package_git_submodule { - use std::str::from_utf8; - - let project = git::new("foo", |project| { - project.file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = ["foo@example.com"] - license = "MIT" - description = "foo" - repository = "foo" - "#) - .file("src/lib.rs", "pub fn foo() {}") - }).unwrap(); - let library = git::new("bar", |library| { - library.file("Makefile", "all:") - }).unwrap(); - - let repository = git2::Repository::open(&project.root()).unwrap(); - let url = path2url(library.root()).to_string(); - git::add_submodule(&repository, &url, Path::new("bar")); - git::commit(&repository); - - let repository = git2::Repository::open(&project.root().join("bar")).unwrap(); - repository.reset(&repository.revparse_single("HEAD").unwrap(), - git2::ResetType::Hard, None).unwrap(); - - let result = project.cargo("package").arg("--no-verify").arg("-v").exec_with_output().unwrap(); - assert!(result.status.success()); - assert!(from_utf8(&result.stdout).unwrap().contains(&format!("{} bar/Makefile", ARCHIVING))); -}); - -test!(ignore_nested { - let cargo_toml = r#" - [project] - name = "nested" - version = "0.0.1" - authors = [] - license = "MIT" - description = "nested" - "#; - let main_rs = r#" - fn main() { println!("hello"); } - "#; - let p = project("nested") - .file("Cargo.toml", cargo_toml) - .file("src/main.rs", main_rs) - // If a project happens to contain a copy of itself, we should - // ignore it. 
- .file("a_dir/nested/Cargo.toml", cargo_toml) - .file("a_dir/nested/src/main.rs", main_rs); - - assert_that(p.cargo_process("package"), - execs().with_status(0).with_stdout(&format!("\ -{packaging} nested v0.0.1 ({dir}) -{verifying} nested v0.0.1 ({dir}) -{compiling} nested v0.0.1 ({dir}[..]) -", - packaging = PACKAGING, - verifying = VERIFYING, - compiling = COMPILING, - dir = p.url()))); - assert_that(&p.root().join("target/package/nested-0.0.1.crate"), existing_file()); - assert_that(p.cargo("package").arg("-l"), - execs().with_status(0).with_stdout("\ -Cargo.toml -src[..]main.rs -")); - assert_that(p.cargo("package"), - execs().with_status(0).with_stdout("")); - - let f = File::open(&p.root().join("target/package/nested-0.0.1.crate")).unwrap(); - let mut rdr = GzDecoder::new(f).unwrap(); - let mut contents = Vec::new(); - rdr.read_to_end(&mut contents).unwrap(); - let ar = Archive::new(Cursor::new(contents)); - for f in ar.files().unwrap() { - let f = f.unwrap(); - let fname = f.header().path_bytes(); - let fname = &*fname; - assert!(fname == b"nested-0.0.1/Cargo.toml" || - fname == b"nested-0.0.1/src/main.rs", - "unexpected filename: {:?}", f.header().path()) - } -}); diff --git a/tests/test_cargo_profiles.rs b/tests/test_cargo_profiles.rs deleted file mode 100644 index b41d4e851d7..00000000000 --- a/tests/test_cargo_profiles.rs +++ /dev/null @@ -1,111 +0,0 @@ -use std::env; -use std::path::MAIN_SEPARATOR as SEP; - -use support::{project, execs}; -use support::{COMPILING, RUNNING}; -use hamcrest::assert_that; - -fn setup() { -} - -test!(profile_overrides { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.dev] - opt-level = 1 - debug = false - rpath = true - "#) - .file("src/lib.rs", ""); - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} test v0.0.0 ({url}) -{running} `rustc src{sep}lib.rs --crate-name test --crate-type lib \ - -C opt-level=1 \ - -C debug-assertions=on \ - -C rpath \ - --out-dir {dir}{sep}target{sep}debug \ - --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}debug \ - -L dependency={dir}{sep}target{sep}debug{sep}deps` -", -running = RUNNING, compiling = COMPILING, sep = SEP, -dir = p.root().display(), -url = p.url(), -))); -}); - -test!(top_level_overrides_deps { - let mut p = project("foo"); - p = p - .file("Cargo.toml", r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.release] - opt-level = 1 - debug = true - - [dependencies.foo] - path = "foo" - "#) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", r#" - [package] - - name = "foo" - version = "0.0.0" - authors = [] - - [profile.release] - opt-level = 0 - debug = false - - [lib] - name = "foo" - crate_type = ["dylib", "rlib"] - "#) - .file("foo/src/lib.rs", ""); - assert_that(p.cargo_process("build").arg("-v").arg("--release"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.0 ({url}) -{running} `rustc foo{sep}src{sep}lib.rs --crate-name foo \ - --crate-type dylib --crate-type rlib -C prefer-dynamic \ - -C opt-level=1 \ - -g \ - -C metadata=[..] \ - -C extra-filename=-[..] 
\ - --out-dir {dir}{sep}target{sep}release{sep}deps \ - --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}release{sep}deps \ - -L dependency={dir}{sep}target{sep}release{sep}deps` -{compiling} test v0.0.0 ({url}) -{running} `rustc src{sep}lib.rs --crate-name test --crate-type lib \ - -C opt-level=1 \ - -g \ - --out-dir {dir}{sep}target{sep}release \ - --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}release \ - -L dependency={dir}{sep}target{sep}release{sep}deps \ - --extern foo={dir}{sep}target{sep}release{sep}deps{sep}\ - {prefix}foo-[..]{suffix} \ - --extern foo={dir}{sep}target{sep}release{sep}deps{sep}libfoo-[..].rlib` -", - running = RUNNING, - compiling = COMPILING, - dir = p.root().display(), - url = p.url(), - sep = SEP, - prefix = env::consts::DLL_PREFIX, - suffix = env::consts::DLL_SUFFIX))); -}); diff --git a/tests/test_cargo_publish.rs b/tests/test_cargo_publish.rs deleted file mode 100644 index 5e35c482a70..00000000000 --- a/tests/test_cargo_publish.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::io::prelude::*; -use std::fs::{self, File}; -use std::io::{Cursor, SeekFrom}; -use std::path::PathBuf; - -use flate2::read::GzDecoder; -use tar::Archive; -use url::Url; - -use support::{project, execs}; -use support::{UPDATING, PACKAGING, UPLOADING}; -use support::paths; -use support::git::repo; - -use hamcrest::assert_that; - -fn registry_path() -> PathBuf { paths::root().join("registry") } -fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } -fn upload_path() -> PathBuf { paths::root().join("upload") } -fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() } - -fn setup() { - let config = paths::root().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - File::create(&config).unwrap().write_all(&format!(r#" - [registry] - index = "{reg}" - token = "api-token" - "#, reg = registry()).as_bytes()).unwrap(); - fs::create_dir_all(&upload_path().join("api/v1/crates")).unwrap(); - - repo(®istry_path()) - .file("config.json", &format!(r#"{{ - "dl": "{0}", - "api": "{0}" - }}"#, upload())) - .build(); -} - -test!(simple { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("publish").arg("--no-verify"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `{reg}` -{packaging} foo v0.0.1 ({dir}) -{uploading} foo v0.0.1 ({dir}) -", - updating = UPDATING, - uploading = UPLOADING, - packaging = PACKAGING, - dir = p.url(), - reg = registry()))); - - let mut f = File::open(&upload_path().join("api/v1/crates/new")).unwrap(); - // Skip the metadata payload and the size of the tarball - let mut sz = [0; 4]; - assert_eq!(f.read(&mut sz).unwrap(), 4); - let sz = ((sz[0] as u32) << 0) | - ((sz[1] as u32) << 8) | - ((sz[2] as u32) << 16) | - ((sz[3] as u32) << 24); - f.seek(SeekFrom::Current(sz as i64 + 4)).unwrap(); - - // Verify the tarball - let mut rdr = GzDecoder::new(f).unwrap(); - assert_eq!(rdr.header().filename().unwrap(), "foo-0.0.1.crate".as_bytes()); - let mut contents = Vec::new(); - rdr.read_to_end(&mut contents).unwrap(); - let inner = Cursor::new(contents); - let ar = Archive::new(inner); - for file in ar.files().unwrap() { - let file = file.unwrap(); - let fname = file.header().path_bytes(); - let fname = &*fname; - assert!(fname == b"foo-0.0.1/Cargo.toml" || - fname == b"foo-0.0.1/src/main.rs", - 
"unexpected filename: {:?}", file.header().path()); - } -}); - -test!(git_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [dependencies.foo] - git = "git://path/to/nowhere" - "#) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("publish").arg("-v").arg("--no-verify"), - execs().with_status(101).with_stderr("\ -all dependencies must come from the same source. -dependency `foo` comes from git://path/to/nowhere instead -")); -}); - -test!(path_dependency_no_version { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [dependencies.bar] - path = "bar" - "#) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", ""); - - assert_that(p.cargo_process("publish"), - execs().with_status(101).with_stderr("\ -all path dependencies must have a version specified when publishing. -dependency `bar` does not specify a version -")); -}); diff --git a/tests/test_cargo_read_manifest.rs b/tests/test_cargo_read_manifest.rs deleted file mode 100644 index 52f79ccb978..00000000000 --- a/tests/test_cargo_read_manifest.rs +++ /dev/null @@ -1,79 +0,0 @@ -use support::{project, execs, main_file, basic_bin_manifest}; -use hamcrest::{assert_that}; - -fn setup() {} - -fn read_manifest_output() -> String { - "\ -{\ - \"name\":\"foo\",\ - \"version\":\"0.5.0\",\ - \"dependencies\":[],\ - \"targets\":[{\ - \"kind\":[\"bin\"],\ - \"name\":\"foo\",\ - \"src_path\":\"src[..]foo.rs\",\ - \"metadata\":null\ - }],\ - \"manifest_path\":\"[..]Cargo.toml\"\ -}".into() -} - -test!(cargo_read_manifest_path_to_cargo_toml_relative { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("read-manifest") - .arg("--manifest-path").arg("foo/Cargo.toml") - .cwd(p.root().parent().unwrap()), - execs().with_status(0) - .with_stdout(read_manifest_output())); -}); - -test!(cargo_read_manifest_path_to_cargo_toml_absolute { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("read-manifest") - .arg("--manifest-path").arg(p.root().join("Cargo.toml")) - .cwd(p.root().parent().unwrap()), - execs().with_status(0) - .with_stdout(read_manifest_output())); -}); - -test!(cargo_read_manifest_path_to_cargo_toml_parent_relative { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("read-manifest") - .arg("--manifest-path").arg("foo") - .cwd(p.root().parent().unwrap()), - execs().with_status(101) - .with_stderr("the manifest-path must be a path to a Cargo.toml file")); -}); - -test!(cargo_read_manifest_path_to_cargo_toml_parent_absolute { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("read-manifest") - .arg("--manifest-path").arg(p.root()) - .cwd(p.root().parent().unwrap()), - execs().with_status(101) - .with_stderr("the manifest-path must be a path to a Cargo.toml file")); -}); - -test!(cargo_read_manifest_cwd { - let p = project("foo") - .file("Cargo.toml", 
&basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])); - - assert_that(p.cargo_process("read-manifest") - .cwd(p.root()), - execs().with_status(0) - .with_stdout(read_manifest_output())); -}); diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs deleted file mode 100644 index fef4fa5008a..00000000000 --- a/tests/test_cargo_registry.rs +++ /dev/null @@ -1,831 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use cargo::util::process; - -use support::{project, execs, cargo_dir}; -use support::{UPDATING, DOWNLOADING, COMPILING, PACKAGING, VERIFYING, ADDING, REMOVING}; -use support::paths::{self, CargoPathExt}; -use support::registry as r; -use support::git; - -use hamcrest::assert_that; - -fn setup() { - r::init(); -} - -test!(simple { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = ">= 0.0.0" - "#) - .file("src/main.rs", "fn main() {}"); - - r::mock_pkg("bar", "0.0.1", &[]); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `{reg}` -{downloading} bar v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", - updating = UPDATING, - downloading = DOWNLOADING, - compiling = COMPILING, - dir = p.url(), - reg = r::registry()))); - - // Don't download a second time - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `{reg}` -[..] bar v0.0.1 (registry file://[..]) -[..] foo v0.0.1 ({dir}) -", - updating = UPDATING, - dir = p.url(), - reg = r::registry()))); -}); - -test!(deps { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = ">= 0.0.0" - "#) - .file("src/main.rs", "fn main() {}"); - - r::mock_pkg("baz", "0.0.1", &[]); - r::mock_pkg("bar", "0.0.1", &[("baz", "*", "normal")]); - - assert_that(p.cargo_process("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `{reg}` -{downloading} [..] v0.0.1 (registry file://[..]) -{downloading} [..] v0.0.1 (registry file://[..]) -{compiling} baz v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", - updating = UPDATING, - downloading = DOWNLOADING, - compiling = COMPILING, - dir = p.url(), - reg = r::registry()))); -}); - -test!(nonexistent { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - nonexistent = ">= 0.0.0" - "#) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr("\ -no matching package named `nonexistent` found (required by `foo`) -location searched: registry file://[..] -version required: >= 0.0.0 -")); -}); - -test!(wrong_version { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - foo = ">= 1.0.0" - "#) - .file("src/main.rs", "fn main() {}"); - - r::mock_pkg("foo", "0.0.1", &[]); - r::mock_pkg("foo", "0.0.2", &[]); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr("\ -no matching package named `foo` found (required by `foo`) -location searched: registry file://[..] 
-version required: >= 1.0.0 -versions found: 0.0.2, 0.0.1 -")); - - r::mock_pkg("foo", "0.0.3", &[]); - r::mock_pkg("foo", "0.0.4", &[]); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr("\ -no matching package named `foo` found (required by `foo`) -location searched: registry file://[..] -version required: >= 1.0.0 -versions found: 0.0.4, 0.0.3, 0.0.2, ... -")); -}); - -test!(bad_cksum { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bad-cksum = ">= 0.0.0" - "#) - .file("src/main.rs", "fn main() {}"); - - r::mock_pkg("bad-cksum", "0.0.1", &[]); - File::create(&r::mock_archive_dst("bad-cksum", "0.0.1")).unwrap(); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(101).with_stderr("\ -Unable to get packages from source - -Caused by: - Failed to download package `bad-cksum v0.0.1 (registry file://[..])` from [..] - -Caused by: - Failed to verify the checksum of `bad-cksum v0.0.1 (registry file://[..])` -")); -}); - -test!(update_registry { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - notyet = ">= 0.0.0" - "#) - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("build"), - execs().with_status(101).with_stderr("\ -no matching package named `notyet` found (required by `foo`) -location searched: registry file://[..] -version required: >= 0.0.0 -")); - - r::mock_pkg("notyet", "0.0.1", &[]); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `{reg}` -{downloading} notyet v0.0.1 (registry file://[..]) -{compiling} notyet v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", - updating = UPDATING, - downloading = DOWNLOADING, - compiling = COMPILING, - dir = p.url(), - reg = r::registry()))); -}); - -test!(package_with_path_deps { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - repository = "bar" - - [dependencies.notyet] - version = "0.0.1" - path = "notyet" - "#) - .file("src/main.rs", "fn main() {}") - .file("notyet/Cargo.toml", r#" - [package] - name = "notyet" - version = "0.0.1" - authors = [] - "#) - .file("notyet/src/lib.rs", ""); - p.build(); - - assert_that(p.cargo("package").arg("-v"), - execs().with_status(101).with_stderr("\ -failed to verify package tarball - -Caused by: - no matching package named `notyet` found (required by `foo`) -location searched: registry file://[..] 
-version required: ^0.0.1 -")); - - r::mock_pkg("notyet", "0.0.1", &[]); - - assert_that(p.cargo("package"), - execs().with_status(0).with_stdout(format!("\ -{packaging} foo v0.0.1 ({dir}) -{verifying} foo v0.0.1 ({dir}) -{updating} registry `[..]` -{downloading} notyet v0.0.1 (registry file://[..]) -{compiling} notyet v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}[..]) -", - packaging = PACKAGING, - verifying = VERIFYING, - updating = UPDATING, - downloading = DOWNLOADING, - compiling = COMPILING, - dir = p.url(), -))); -}); - -test!(lockfile_locks { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("bar", "0.0.1", &[]); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{downloading} bar v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); - - p.root().move_into_the_past().unwrap(); - r::mock_pkg("bar", "0.0.2", &[]); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout("")); -}); - -test!(lockfile_locks_transitively { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("baz", "0.0.1", &[]); - r::mock_pkg("bar", "0.0.1", &[("baz", "*", "normal")]); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{downloading} [..] v0.0.1 (registry file://[..]) -{downloading} [..] v0.0.1 (registry file://[..]) -{compiling} baz v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); - - p.root().move_into_the_past().unwrap(); - r::mock_pkg("baz", "0.0.2", &[]); - r::mock_pkg("bar", "0.0.2", &[("baz", "*", "normal")]); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout("")); -}); - -test!(yanks_are_not_used { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("baz", "0.0.1", &[]); - r::mock_pkg_yank("baz", "0.0.2", &[], true); - r::mock_pkg("bar", "0.0.1", &[("baz", "*", "normal")]); - r::mock_pkg_yank("bar", "0.0.2", &[("baz", "*", "normal")], true); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{downloading} [..] v0.0.1 (registry file://[..]) -{downloading} [..] 
v0.0.1 (registry file://[..]) -{compiling} baz v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); -}); - -test!(relying_on_a_yank_is_bad { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("baz", "0.0.1", &[]); - r::mock_pkg_yank("baz", "0.0.2", &[], true); - r::mock_pkg("bar", "0.0.1", &[("baz", "=0.0.2", "normal")]); - - assert_that(p.cargo("build"), - execs().with_status(101).with_stderr("\ -no matching package named `baz` found (required by `bar`) -location searched: registry file://[..] -version required: = 0.0.2 -versions found: 0.0.1 -")); -}); - -test!(yanks_in_lockfiles_are_ok { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("bar", "0.0.1", &[]); - - assert_that(p.cargo("build"), - execs().with_status(0)); - - fs::remove_dir_all(&r::registry_path().join("3")).unwrap(); - - r::mock_pkg_yank("bar", "0.0.1", &[], true); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout("")); - - assert_that(p.cargo("update"), - execs().with_status(101).with_stderr("\ -no matching package named `bar` found (required by `foo`) -location searched: registry file://[..] -version required: * -")); -}); - -test!(update_with_lockfile_if_packages_missing { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("bar", "0.0.1", &[]); - assert_that(p.cargo("build"), - execs().with_status(0)); - p.root().move_into_the_past().unwrap(); - - paths::home().join(".cargo/registry").rm_rf().unwrap(); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{downloading} bar v0.0.1 (registry file://[..]) -", updating = UPDATING, downloading = DOWNLOADING))); -}); - -test!(update_lockfile { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - println!("0.0.1"); - r::mock_pkg("bar", "0.0.1", &[]); - assert_that(p.cargo("build"), - execs().with_status(0)); - - r::mock_pkg("bar", "0.0.2", &[]); - r::mock_pkg("bar", "0.0.3", &[]); - paths::home().join(".cargo/registry").rm_rf().unwrap(); - println!("0.0.2 update"); - assert_that(p.cargo("update") - .arg("-p").arg("bar").arg("--precise").arg("0.0.2"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{updating} bar v0.0.1 (registry file://[..]) -> v0.0.2 -", updating = UPDATING))); - - println!("0.0.2 build"); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{downloading} [..] 
v0.0.2 (registry file://[..]) -{compiling} bar v0.0.2 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); - - println!("0.0.3 update"); - assert_that(p.cargo("update") - .arg("-p").arg("bar"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{updating} bar v0.0.2 (registry file://[..]) -> v0.0.3 -", updating = UPDATING))); - - println!("0.0.3 build"); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{downloading} [..] v0.0.3 (registry file://[..]) -{compiling} bar v0.0.3 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); - - println!("new dependencies update"); - r::mock_pkg("bar", "0.0.4", &[("spam", "0.2.5", "")]); - r::mock_pkg("spam", "0.2.5", &[]); - assert_that(p.cargo("update") - .arg("-p").arg("bar"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{updating} bar v0.0.3 (registry file://[..]) -> v0.0.4 -{adding} spam v0.2.5 (registry file://[..]) -", updating = UPDATING, adding = ADDING))); - - println!("new dependencies update"); - r::mock_pkg("bar", "0.0.5", &[]); - assert_that(p.cargo("update") - .arg("-p").arg("bar"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{updating} bar v0.0.4 (registry file://[..]) -> v0.0.5 -{removing} spam v0.2.5 (registry file://[..]) -", updating = UPDATING, removing = REMOVING))); -}); - -test!(dev_dependency_not_used { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("baz", "0.0.1", &[]); - r::mock_pkg("bar", "0.0.1", &[("baz", "*", "dev")]); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{downloading} [..] 
v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); -}); - -test!(login_with_no_cargo_dir { - let home = paths::home().join("new-home"); - fs::create_dir(&home).unwrap(); - assert_that(process(&cargo_dir().join("cargo")).unwrap() - .arg("login").arg("foo").arg("-v") - .cwd(&paths::root()) - .env("HOME", &home), - execs().with_status(0)); -}); - -test!(bad_license_file { - let p = project("all") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license-file = "foo" - description = "bar" - repository = "baz" - "#) - .file("src/main.rs", r#" - fn main() {} - "#); - assert_that(p.cargo_process("publish").arg("-v"), - execs().with_status(101) - .with_stderr("\ -the license file `foo` does not exist")); -}); - -test!(updating_a_dep { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.a] - path = "a" - "#) - .file("src/main.rs", "fn main() {}") - .file("a/Cargo.toml", r#" - [project] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#) - .file("a/src/lib.rs", ""); - p.build(); - - r::mock_pkg("bar", "0.0.1", &[]); - - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{downloading} bar v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) -{compiling} a v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); - - File::create(&p.root().join("a/Cargo.toml")).unwrap().write_all(br#" - [project] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#).unwrap(); - r::mock_pkg("bar", "0.1.0", &[]); - - println!("second"); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} registry `[..]` -{downloading} bar v0.1.0 (registry file://[..]) -{compiling} bar v0.1.0 (registry file://[..]) -{compiling} a v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); -}); - -test!(git_and_registry_dep { - let b = git::repo(&paths::root().join("b")) - .file("Cargo.toml", r#" - [project] - name = "b" - version = "0.0.1" - authors = [] - - [dependencies] - a = "0.0.1" - "#) - .file("src/lib.rs", ""); - b.build(); - let p = project("foo") - .file("Cargo.toml", &format!(r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = "0.0.1" - - [dependencies.b] - git = '{}' - "#, b.url())) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("a", "0.0.1", &[]); - - p.root().move_into_the_past().unwrap(); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} [..] -{updating} [..] 
-{downloading} a v0.0.1 (registry file://[..]) -{compiling} a v0.0.1 (registry [..]) -{compiling} b v0.0.1 ([..]) -{compiling} foo v0.0.1 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); - p.root().move_into_the_past().unwrap(); - - println!("second"); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout("")); -}); - -test!(update_publish_then_update { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "0.1.0" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("a", "0.1.0", &[]); - - assert_that(p.cargo("build"), - execs().with_status(0)); - - - r::mock_pkg("a", "0.1.1", &[]); - - let lock = p.root().join("Cargo.lock"); - let mut s = String::new(); - File::open(&lock).unwrap().read_to_string(&mut s).unwrap(); - File::create(&lock).unwrap() - .write_all(s.replace("0.1.0", "0.1.1").as_bytes()).unwrap(); - println!("second"); - - fs::remove_dir_all(&p.root().join("target")).unwrap(); - assert_that(p.cargo("build"), - execs().with_status(0).with_stdout(&format!("\ -{updating} [..] -{downloading} a v0.1.1 (registry file://[..]) -{compiling} a v0.1.1 (registry [..]) -{compiling} foo v0.5.0 ({dir}) -", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, - dir = p.url()))); - -}); - -test!(fetch_downloads { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "0.1.0" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("a", "0.1.0", &[]); - - assert_that(p.cargo("fetch"), - execs().with_status(0) - .with_stdout(format!("\ -{updating} registry `[..]` -{downloading} a v0.1.0 (registry [..]) -", updating = UPDATING, downloading = DOWNLOADING))); -}); - -test!(update_transitive_dependency { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "0.1.0" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("a", "0.1.0", &[("b", "*", "normal")]); - r::mock_pkg("b", "0.1.0", &[]); - - assert_that(p.cargo("fetch"), - execs().with_status(0)); - - r::mock_pkg("b", "0.1.1", &[]); - - assert_that(p.cargo("update").arg("-pb"), - execs().with_status(0) - .with_stdout(format!("\ -{updating} registry `[..]` -{updating} b v0.1.0 (registry [..]) -> v0.1.1 -", updating = UPDATING))); - - assert_that(p.cargo("build"), - execs().with_status(0) - .with_stdout(format!("\ -{downloading} b v0.1.1 (registry file://[..]) -{compiling} b v0.1.1 (registry [..]) -{compiling} a v0.1.0 (registry [..]) -{compiling} foo v0.5.0 ([..]) -", downloading = DOWNLOADING, compiling = COMPILING))); -}); - -test!(update_backtracking_ok { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - webdriver = "0.1" - "#) - .file("src/main.rs", "fn main() {}"); - p.build(); - - r::mock_pkg("webdriver", "0.1.0", &[("hyper", "0.6", "normal")]); - r::mock_pkg("hyper", "0.6.5", &[("openssl", "0.1", "normal"), - ("cookie", "0.1", "normal")]); - r::mock_pkg("cookie", "0.1.0", &[("openssl", "0.1", "normal")]); - r::mock_pkg("openssl", "0.1.0", &[]); - - assert_that(p.cargo("generate-lockfile"), - execs().with_status(0)); - - r::mock_pkg("openssl", "0.1.1", &[]); - r::mock_pkg("hyper", "0.6.6", &[("openssl", "0.1.1", "normal"), - ("cookie", "0.1.0", "normal")]); - - 
assert_that(p.cargo("update").arg("-p").arg("hyper"), - execs().with_status(0) - .with_stdout(&format!("\ -{updating} registry `[..]` -", updating = UPDATING))); -}); diff --git a/tests/test_cargo_run.rs b/tests/test_cargo_run.rs deleted file mode 100644 index a85e7bc2816..00000000000 --- a/tests/test_cargo_run.rs +++ /dev/null @@ -1,488 +0,0 @@ -use std::path::MAIN_SEPARATOR as SEP; - -use support::{project, execs, path2url}; -use support::{COMPILING, RUNNING}; -use hamcrest::{assert_that, existing_file}; - -fn setup() { -} - -test!(simple { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() { println!("hello"); } - "#); - - assert_that(p.cargo_process("run"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} `target{sep}debug{sep}foo[..]` -hello -", - compiling = COMPILING, - running = RUNNING, - dir = path2url(p.root()), - sep = SEP))); - assert_that(&p.bin("foo"), existing_file()); -}); - -test!(simple_quiet { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() { println!("hello"); } - "#); - - assert_that(p.cargo_process("run").arg("-q"), - execs().with_status(0).with_stdout("\ -hello -") - ); -}); - -test!(simple_quiet_and_verbose { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() { println!("hello"); } - "#); - - assert_that(p.cargo_process("run").arg("-q").arg("-v"), - execs().with_status(101).with_stderr("\ -cannot set both --verbose and --quiet -") - ); -}); - -test!(simple_with_args { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() { - assert_eq!(std::env::args().nth(1).unwrap(), "hello"); - assert_eq!(std::env::args().nth(2).unwrap(), "world"); - } - "#); - - assert_that(p.cargo_process("run").arg("hello").arg("world"), - execs().with_status(0)); -}); - -test!(exit_code { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() { std::process::exit(2); } - "#); - - assert_that(p.cargo_process("run"), - execs().with_status(2)); -}); - -test!(no_main_file { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("run"), - execs().with_status(101) - .with_stderr("a bin target must be available \ - for `cargo run`\n")); -}); - -test!(too_many_bins { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("src/bin/a.rs", "") - .file("src/bin/b.rs", ""); - - assert_that(p.cargo_process("run"), - execs().with_status(101) - .with_stderr("`cargo run` requires that a project only \ - have one executable; use the `--bin` option \ - to specify which one to run\n")); -}); - -test!(specify_name { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("src/bin/a.rs", r#" - extern crate foo; - fn main() { println!("hello a.rs"); } - "#) - .file("src/bin/b.rs", r#" - extern crate foo; - fn main() { println!("hello 
b.rs"); } - "#); - - assert_that(p.cargo_process("run").arg("--bin").arg("a").arg("-v"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} `rustc src[..]lib.rs [..]` -{running} `rustc src[..]a.rs [..]` -{running} `target{sep}debug{sep}a[..]` -hello a.rs -", - compiling = COMPILING, - running = RUNNING, - dir = path2url(p.root()), - sep = SEP))); - - assert_that(p.cargo("run").arg("--bin").arg("b").arg("-v"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.1 ([..]) -{running} `rustc src[..]b.rs [..]` -{running} `target{sep}debug{sep}b[..]` -hello b.rs -", - running = RUNNING, compiling = COMPILING, - sep = SEP))); -}); - -test!(run_example { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("examples/a.rs", r#" - fn main() { println!("example"); } - "#) - .file("src/bin/a.rs", r#" - fn main() { println!("bin"); } - "#); - - assert_that(p.cargo_process("run").arg("--example").arg("a"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} `target{sep}debug{sep}examples{sep}a[..]` -example -", - compiling = COMPILING, - running = RUNNING, - dir = path2url(p.root()), - sep = SEP))); -}); - -test!(either_name_or_example { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/bin/a.rs", r#" - fn main() { println!("hello a.rs"); } - "#) - .file("examples/b.rs", r#" - fn main() { println!("hello b.rs"); } - "#); - - assert_that(p.cargo_process("run").arg("--bin").arg("a").arg("--example").arg("b"), - execs().with_status(101) - .with_stderr("`cargo run` can run at most one \ - executable, but multiple were \ - specified")); -}); - -test!(one_bin_multiple_examples { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("src/bin/main.rs", r#" - fn main() { println!("hello main.rs"); } - "#) - .file("examples/a.rs", r#" - fn main() { println!("hello a.rs"); } - "#) - .file("examples/b.rs", r#" - fn main() { println!("hello b.rs"); } - "#); - - assert_that(p.cargo_process("run"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} `target{sep}debug{sep}main[..]` -hello main.rs -", - compiling = COMPILING, - running = RUNNING, - dir = path2url(p.root()), - sep = SEP))); -}); - -test!(example_with_release_flag { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "*" - path = "bar" - "#) - .file("examples/a.rs", r#" - extern crate bar; - - fn main() { - if cfg!(debug_assertions) { - println!("slow1") - } else { - println!("fast1") - } - bar::baz(); - } - "#) - .file("bar/Cargo.toml", r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - "#) - .file("bar/src/bar.rs", r#" - pub fn baz() { - if cfg!(debug_assertions) { - println!("slow2") - } else { - println!("fast2") - } - } - "#); - - assert_that(p.cargo_process("run").arg("-v").arg("--release").arg("--example").arg("a"), - execs().with_status(0).with_stdout(&format!("\ -{compiling} bar v0.0.1 ({url}) -{running} `rustc bar{sep}src{sep}bar.rs --crate-name bar --crate-type lib \ - -C opt-level=3 \ - -C metadata=[..] \ - -C extra-filename=[..] 
\
-                        --out-dir {dir}{sep}target{sep}release{sep}deps \
-                        --emit=dep-info,link \
-                        -L dependency={dir}{sep}target{sep}release{sep}deps \
-                        -L dependency={dir}{sep}target{sep}release{sep}deps`
-{compiling} foo v0.0.1 ({url})
-{running} `rustc examples{sep}a.rs --crate-name a --crate-type bin \
-                        -C opt-level=3 \
-                        --out-dir {dir}{sep}target{sep}release{sep}examples \
-                        --emit=dep-info,link \
-                        -L dependency={dir}{sep}target{sep}release \
-                        -L dependency={dir}{sep}target{sep}release{sep}deps \
-                        --extern bar={dir}{sep}target{sep}release{sep}deps{sep}libbar-[..].rlib`
-{running} `target{sep}release{sep}examples{sep}a[..]`
-fast1
-fast2
-",
-                compiling = COMPILING,
-                running = RUNNING,
-                dir = p.root().display(),
-                url = path2url(p.root()),
-                sep = SEP)));
-
-    assert_that(p.cargo("run").arg("-v").arg("--example").arg("a"),
-                execs().with_status(0).with_stdout(&format!("\
-{compiling} bar v0.0.1 ({url})
-{running} `rustc bar{sep}src{sep}bar.rs --crate-name bar --crate-type lib \
-                        -g \
-                        -C metadata=[..] \
-                        -C extra-filename=[..] \
-                        --out-dir {dir}{sep}target{sep}debug{sep}deps \
-                        --emit=dep-info,link \
-                        -L dependency={dir}{sep}target{sep}debug{sep}deps \
-                        -L dependency={dir}{sep}target{sep}debug{sep}deps`
-{compiling} foo v0.0.1 ({url})
-{running} `rustc examples{sep}a.rs --crate-name a --crate-type bin \
-                        -g \
-                        --out-dir {dir}{sep}target{sep}debug{sep}examples \
-                        --emit=dep-info,link \
-                        -L dependency={dir}{sep}target{sep}debug \
-                        -L dependency={dir}{sep}target{sep}debug{sep}deps \
-                        --extern bar={dir}{sep}target{sep}debug{sep}deps{sep}libbar-[..].rlib`
-{running} `target{sep}debug{sep}examples{sep}a[..]`
-slow1
-slow2
-",
-                compiling = COMPILING,
-                running = RUNNING,
-                dir = p.root().display(),
-                url = path2url(p.root()),
-                sep = SEP)));
-});
-
-test!(run_dylib_dep {
-    let p = project("foo")
-        .file("Cargo.toml", r#"
-            [project]
-            name = "foo"
-            version = "0.0.1"
-            authors = []
-
-            [dependencies.bar]
-            path = "bar"
-        "#)
-        .file("src/main.rs", r#"
-            extern crate bar;
-            fn main() { bar::bar(); }
-        "#)
-        .file("bar/Cargo.toml", r#"
-            [package]
-            name = "bar"
-            version = "0.0.1"
-            authors = []
-
-            [lib]
-            name = "bar"
-            crate-type = ["dylib"]
-        "#)
-        .file("bar/src/lib.rs", "pub fn bar() {}");
-
-    assert_that(p.cargo_process("run").arg("hello").arg("world"),
-                execs().with_status(0));
-});
-
-test!(release_works {
-    let p = project("foo")
-        .file("Cargo.toml", r#"
-            [project]
-            name = "foo"
-            version = "0.0.1"
-            authors = []
-        "#)
-        .file("src/main.rs", r#"
-            fn main() { if cfg!(debug_assertions) { panic!() } }
-        "#);
-
-    assert_that(p.cargo_process("run").arg("--release"),
-                execs().with_status(0).with_stdout(&format!("\
-{compiling} foo v0.0.1 ({dir})
-{running} `target{sep}release{sep}foo[..]`
-",
-                compiling = COMPILING,
-                running = RUNNING,
-                dir = path2url(p.root()),
-                sep = SEP)));
-    assert_that(&p.release_bin("foo"), existing_file());
-});
-
-test!(run_bin_different_name {
-    let p = project("foo")
-        .file("Cargo.toml", r#"
-            [project]
-            name = "foo"
-            version = "0.0.1"
-            authors = []
-
-            [[bin]]
-            name = "bar"
-        "#)
-        .file("src/bar.rs", r#"
-            fn main() { }
-        "#);
-
-    assert_that(p.cargo_process("run"), execs().with_status(0));
-});
-
-test!(dashes_are_forwarded {
-    let p = project("foo")
-        .file("Cargo.toml", r#"
-            [project]
-            name = "foo"
-            version = "0.0.1"
-            authors = []
-
-            [[bin]]
-            name = "bar"
-        "#)
-        .file("src/main.rs", r#"
-            fn main() {
-                let s: Vec<String> = std::env::args().collect();
-                assert_eq!(s[1], "a");
-                assert_eq!(s[2], "--");
-                assert_eq!(s[3], "b");
-            }
-        "#);
-
-    
assert_that(p.cargo_process("run").arg("--").arg("a").arg("--").arg("b"), - execs().with_status(0)); -}); - -test!(run_from_executable_folder { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() { println!("hello"); } - "#); - - let cwd = p.root().join("target").join("debug"); - p.cargo_process("build").exec_with_output().unwrap(); - - assert_that(p.cargo("run").cwd(cwd), - execs().with_status(0).with_stdout(&format!("\ -{running} `.{sep}foo[..]` -hello -", - running = RUNNING, - sep = SEP - ))); -}); diff --git a/tests/test_cargo_rustc.rs b/tests/test_cargo_rustc.rs deleted file mode 100644 index 8000517932c..00000000000 --- a/tests/test_cargo_rustc.rs +++ /dev/null @@ -1,298 +0,0 @@ -use std::path::MAIN_SEPARATOR as SEP; -use support::{execs, project}; -use support::{COMPILING, RUNNING}; -use hamcrest::{assert_that}; - -fn setup() { -} - -fn cargo_rustc_error() -> &'static str { - "extra arguments to `rustc` can only be passed to one target, consider filtering\n\ - the package by passing e.g. `--lib` or `--bin NAME` to specify a single target" -} - -test!(build_lib_for_foo { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("rustc").arg("--lib").arg("-v"), - execs() - .with_status(0) - .with_stdout(format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ - --out-dir {dir}{sep}target{sep}debug \ - --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}debug \ - -L dependency={dir}{sep}target{sep}debug{sep}deps` -", - running = RUNNING, compiling = COMPILING, sep = SEP, - dir = p.root().display(), url = p.url()))); -}); - -test!(build_lib_and_allow_unstable_options { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("rustc").arg("--lib").arg("-v") - .arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(0) - .with_stdout(format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ - -Z unstable-options \ - --out-dir {dir}{sep}target{sep}debug \ - --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}debug \ - -L dependency={dir}{sep}target{sep}debug{sep}deps` -", - running = RUNNING, compiling = COMPILING, sep = SEP, - dir = p.root().display(), url = p.url()))) -}); - -test!(build_main_and_allow_unstable_options { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("rustc").arg("-v").arg("--bin").arg("foo") - .arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(0) - .with_stdout(&format!("\ -{compiling} {name} v{version} ({url}) -{running} `rustc src{sep}lib.rs --crate-name {name} --crate-type lib -g \ - --out-dir {dir}{sep}target{sep}debug \ - --emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}debug \ - -L dependency={dir}{sep}target{sep}debug{sep}deps` -{running} `rustc src{sep}main.rs --crate-name {name} --crate-type bin -g \ - -Z unstable-options \ - --out-dir {dir}{sep}target{sep}debug \ - 
--emit=dep-info,link \ - -L dependency={dir}{sep}target{sep}debug \ - -L dependency={dir}{sep}target{sep}debug{sep}deps \ - --extern {name}={dir}{sep}target{sep}debug{sep}lib{name}.rlib` -", - running = RUNNING, compiling = COMPILING, sep = SEP, - dir = p.root().display(), url = p.url(), - name = "foo", version = "0.0.1"))); -}); - -test!(fails_when_trying_to_build_main_and_lib_with_args { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", r#" - fn main() {} - "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("rustc").arg("-v") - .arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(101) - .with_stderr(cargo_rustc_error())); -}); - -test!(build_with_args_to_one_of_multiple_binaries { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/bin/foo.rs", r#" - fn main() {} - "#) - .file("src/bin/bar.rs", r#" - fn main() {} - "#) - .file("src/bin/baz.rs", r#" - fn main() {} - "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("rustc").arg("-v").arg("--bin").arg("bar") - .arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(0) - .with_stdout(format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ - --out-dir {dir}{sep}target{sep}debug [..]` -{running} `rustc src{sep}bin{sep}bar.rs --crate-name bar --crate-type bin -g \ - -Z unstable-options [..]` -", - compiling = COMPILING, running = RUNNING, sep = SEP, - dir = p.root().display(), url = p.url()))); -}); - -test!(fails_with_args_to_all_binaries { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/bin/foo.rs", r#" - fn main() {} - "#) - .file("src/bin/bar.rs", r#" - fn main() {} - "#) - .file("src/bin/baz.rs", r#" - fn main() {} - "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("rustc").arg("-v") - .arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(101) - .with_stderr(cargo_rustc_error())); -}); - -test!(build_with_args_to_one_of_multiple_tests { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("tests/foo.rs", r#" "#) - .file("tests/bar.rs", r#" "#) - .file("tests/baz.rs", r#" "#) - .file("src/lib.rs", r#" "#); - - assert_that(p.cargo_process("rustc").arg("-v").arg("--test").arg("bar") - .arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(0) - .with_stdout(format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc src{sep}lib.rs --crate-name foo --crate-type lib -g \ - --out-dir {dir}{sep}target{sep}debug [..]` -{running} `rustc tests{sep}bar.rs --crate-name bar --crate-type bin -g \ - -Z unstable-options [..]--test[..]` -", - compiling = COMPILING, running = RUNNING, sep = SEP, - dir = p.root().display(), url = p.url()))); -}); - -test!(build_foo_with_bar_dependency { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#) - .file("src/main.rs", r#" - extern crate bar; - fn main() { - bar::baz() - } - "#); - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - "#) - .file("src/lib.rs", r#" - pub fn baz() {} - "#); - bar.build(); - - 
assert_that(foo.cargo_process("rustc").arg("-v").arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(0) - .with_stdout(format!("\ -{compiling} bar v0.1.0 ({url}) -{running} `[..] -g -C [..]` -{compiling} foo v0.0.1 ({url}) -{running} `[..] -g -Z unstable-options [..]` -", - compiling = COMPILING, running = RUNNING, - url = foo.url()))); -}); - -test!(build_only_bar_dependency { - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#) - .file("src/main.rs", r#" - extern crate bar; - fn main() { - bar::baz() - } - "#); - let bar = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - "#) - .file("src/lib.rs", r#" - pub fn baz() {} - "#); - bar.build(); - - assert_that(foo.cargo_process("rustc").arg("-v").arg("-p").arg("bar") - .arg("--").arg("-Z").arg("unstable-options"), - execs() - .with_status(0) - .with_stdout(format!("\ -{compiling} bar v0.1.0 ({url}) -{running} `[..]--crate-name bar --crate-type lib [..] -Z unstable-options [..]` -", - compiling = COMPILING, running = RUNNING, - url = foo.url()))); -}); diff --git a/tests/test_cargo_search.rs b/tests/test_cargo_search.rs deleted file mode 100644 index 6e55047f1d3..00000000000 --- a/tests/test_cargo_search.rs +++ /dev/null @@ -1,94 +0,0 @@ -use std::fs::{self, File}; -use std::io::prelude::*; -use std::path::PathBuf; - -use url::Url; - -use cargo::util::{process, ProcessBuilder}; -use support::UPDATING; -use support::{execs, cargo_dir}; -use support::paths; -use support::git::repo; - -use hamcrest::assert_that; - -fn registry_path() -> PathBuf { paths::root().join("registry") } -fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } -fn api_path() -> PathBuf { paths::root().join("api") } -fn api() -> Url { Url::from_file_path(&*api_path()).ok().unwrap() } - -fn setup() { - let config = paths::root().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - File::create(&config).unwrap().write_all(format!(r#" - [registry] - index = "{reg}" - "#, reg = registry()).as_bytes()).unwrap(); - fs::create_dir_all(&api_path().join("api/v1")).unwrap(); - - repo(®istry_path()) - .file("config.json", &format!(r#"{{ - "dl": "{0}", - "api": "{0}" - }}"#, api())) - .build(); -} - -fn cargo_process(s: &str) -> ProcessBuilder { - let mut b = process(&cargo_dir().join("cargo")).unwrap(); - b.arg(s).cwd(&paths::root()).env("HOME", &paths::home()); - b -} - -test!(simple { - let contents = r#"{ - "crates": [{ - "created_at": "2014-11-16T20:17:35Z", - "description": "Design by contract style assertions for Rust", - "documentation": null, - "downloads": 2, - "homepage": null, - "id": "hoare", - "keywords": [], - "license": null, - "links": { - "owners": "/api/v1/crates/hoare/owners", - "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", - "version_downloads": "/api/v1/crates/hoare/downloads", - "versions": "/api/v1/crates/hoare/versions" - }, - "max_version": "0.1.1", - "name": "hoare", - "repository": "https://github.com/nick29581/libhoare", - "updated_at": "2014-11-20T21:49:21Z", - "versions": null - }], - "meta": { - "total": 1 - } - }"#; - let base = api_path().join("api/v1/crates"); - - // Older versions of curl don't peel off query parameters when looking for - // filenames, so just make both files. - // - // On windows, though, `?` is an invalid character, but we always build curl - // from source there anyway! 
- File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap(); - if !cfg!(windows) { - File::create(&base.with_file_name("crates?q=postgres")).unwrap() - .write_all(contents.as_bytes()).unwrap(); - } - - assert_that(cargo_process("search").arg("postgres"), - execs().with_status(0).with_stdout(format!("\ -{updating} registry `[..]` -hoare (0.1.1) Design by contract style assertions for Rust", updating = UPDATING))); -}); - -test!(help { - assert_that(cargo_process("search").arg("-h"), - execs().with_status(0)); - assert_that(cargo_process("help").arg("search"), - execs().with_status(0)); -}); diff --git a/tests/test_cargo_test.rs b/tests/test_cargo_test.rs deleted file mode 100644 index a3522c4f55b..00000000000 --- a/tests/test_cargo_test.rs +++ /dev/null @@ -1,1857 +0,0 @@ -use std::str; - -use support::{project, execs, basic_bin_manifest, basic_lib_manifest}; -use support::{COMPILING, RUNNING, DOCTEST}; -use support::paths::CargoPathExt; -use hamcrest::{assert_that, existing_file, is_not}; -use cargo::util::process; - -fn setup() {} - -test!(cargo_test_simple { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[test] - fn test_hello() { - assert_eq!(hello(), "hello") - }"#); - - assert_that(p.cargo_process("build"), execs()); - assert_that(&p.bin("foo"), existing_file()); - - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("hello\n")); - - assert_that(p.cargo("test"), - execs().with_stdout(format!("\ -{} foo v0.5.0 ({}) -{} target[..]foo-[..] - -running 1 test -test test_hello ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - COMPILING, p.url(), - RUNNING))); -}); - -test!(cargo_test_release { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - authors = [] - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - "#) - .file("src/lib.rs", r#" - extern crate bar; - pub fn foo() { bar::bar(); } - - #[test] - fn test() { foo(); } - "#) - .file("tests/test.rs", r#" - extern crate foo; - - #[test] - fn test() { foo::foo(); } - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", "pub fn bar() {}"); - - assert_that(p.cargo_process("test").arg("-v").arg("--release"), - execs().with_stdout(format!("\ -{compiling} bar v0.0.1 ({dir}) -{running} [..] -C opt-level=3 [..] -{compiling} foo v0.1.0 ({dir}) -{running} [..] -C opt-level=3 [..] -{running} [..] -C opt-level=3 [..] -{running} [..] -C opt-level=3 [..] -{running} `[..]target[..]foo-[..]` - -running 1 test -test test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} `[..]target[..]test-[..]` - -running 1 test -test test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo -{running} `rustdoc --test [..]lib.rs[..]` - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", -compiling = COMPILING, dir = p.url(), running = RUNNING, doctest = DOCTEST))); -}); - -test!(cargo_test_verbose { - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - fn main() {} - #[test] fn test_hello() {} - "#); - - assert_that(p.cargo_process("test").arg("-v").arg("hello"), - execs().with_stdout(format!("\ -{compiling} foo v0.5.0 ({url}) -{running} `rustc src[..]foo.rs [..]` -{running} `[..]target[..]foo-[..] 
hello` - -running 1 test -test test_hello ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, url = p.url(), running = RUNNING))); -}); - -test!(many_similar_names { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - pub fn foo() {} - #[test] fn lib_test() {} - ") - .file("src/main.rs", " - extern crate foo; - fn main() {} - #[test] fn bin_test() { foo::foo() } - ") - .file("tests/foo.rs", r#" - extern crate foo; - #[test] fn test_test() { foo::foo() } - "#); - - let output = p.cargo_process("test").arg("-v").exec_with_output().unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - assert!(output.contains("test bin_test"), "bin_test missing\n{}", output); - assert!(output.contains("test lib_test"), "lib_test missing\n{}", output); - assert!(output.contains("test test_test"), "test_test missing\n{}", output); -}); - -test!(cargo_test_failing_test { - if !::can_panic() { return } - - let p = project("foo") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", r#" - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[test] - fn test_hello() { - assert_eq!(hello(), "nope") - }"#); - - assert_that(p.cargo_process("build"), execs()); - assert_that(&p.bin("foo"), existing_file()); - - assert_that(process(&p.bin("foo")).unwrap(), - execs().with_stdout("hello\n")); - - assert_that(p.cargo("test"), - execs().with_stdout(format!("\ -{} foo v0.5.0 ({}) -{} target[..]foo-[..] - -running 1 test -test test_hello ... FAILED - -failures: - ----- test_hello stdout ---- -thread 'test_hello' panicked at 'assertion failed: \ - `(left == right)` (left: \ - `\"hello\"`, right: `\"nope\"`)', src[..]foo.rs:12 - - - -failures: - test_hello - -test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured - -", - COMPILING, p.url(), RUNNING)) - .with_status(101)); -}); - -test!(test_with_lib_dep { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "baz" - path = "src/main.rs" - "#) - .file("src/lib.rs", r#" - /// - /// ```rust - /// extern crate foo; - /// fn main() { - /// println!("{:?}", foo::foo()); - /// } - /// ``` - /// - pub fn foo(){} - #[test] fn lib_test() {} - "#) - .file("src/main.rs", " - extern crate foo; - - fn main() {} - - #[test] - fn bin_test() {} - "); - - assert_that(p.cargo_process("test"), - execs().with_stdout(format!("\ -{} foo v0.0.1 ({}) -{running} target[..]baz-[..] - -running 1 test -test bin_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]foo[..] - -running 1 test -test lib_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 1 test -test foo_0 ... ok - -test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured - -", - COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) -}); - -test!(test_with_deep_lib_dep { - let p = project("bar") - .file("Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies.foo] - path = "../foo" - "#) - .file("src/lib.rs", " - extern crate foo; - /// ``` - /// bar::bar(); - /// ``` - pub fn bar() {} - - #[test] - fn bar_test() { - foo::foo(); - } - "); - let p2 = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - pub fn foo() {} - - #[test] - fn foo_test() {} - "); - - p2.build(); - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{compiling} bar v0.0.1 ({dir}) -{running} target[..] - -running 1 test -test bar_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} bar - -running 1 test -test bar_0 ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, running = RUNNING, - doctest = DOCTEST, - dir = p.url()))); -}); - -test!(external_test_explicit { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[test]] - name = "test" - path = "src/test.rs" - "#) - .file("src/lib.rs", r#" - pub fn get_hello() -> &'static str { "Hello" } - - #[test] - fn internal_test() {} - "#) - .file("src/test.rs", r#" - extern crate foo; - - #[test] - fn external_test() { assert_eq!(foo::get_hello(), "Hello") } - "#); - - assert_that(p.cargo_process("test"), - execs().with_stdout(format!("\ -{} foo v0.0.1 ({}) -{running} target[..]foo-[..] - -running 1 test -test internal_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]test-[..] - -running 1 test -test external_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", - COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) -}); - -test!(external_test_implicit { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - pub fn get_hello() -> &'static str { "Hello" } - - #[test] - fn internal_test() {} - "#) - .file("tests/external.rs", r#" - extern crate foo; - - #[test] - fn external_test() { assert_eq!(foo::get_hello(), "Hello") } - "#); - - assert_that(p.cargo_process("test"), - execs().with_stdout(format!("\ -{} foo v0.0.1 ({}) -{running} target[..]external-[..] - -running 1 test -test external_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]foo-[..] - -running 1 test -test internal_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured - -", - COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) -}); - -test!(dont_run_examples { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - "#) - .file("examples/dont-run-me-i-will-fail.rs", r#" - fn main() { panic!("Examples should not be run by 'cargo test'"); } - "#); - assert_that(p.cargo_process("test"), - execs().with_status(0)); -}); - -test!(pass_through_command_line { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - #[test] fn foo() {} - #[test] fn bar() {} - "); - - assert_that(p.cargo_process("test").arg("bar"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]foo-[..] - -running 1 test -test bar ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, running = RUNNING, - doctest = DOCTEST, - dir = p.url()))); - - assert_that(p.cargo("test").arg("foo"), - execs().with_status(0) - .with_stdout(&format!("\ -{running} target[..]foo-[..] - -running 1 test -test foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", - running = RUNNING, - doctest = DOCTEST))); -}); - -// Regression test for running cargo-test twice with -// tests in an rlib -test!(cargo_test_twice { - let p = project("test_twice") - .file("Cargo.toml", &basic_lib_manifest("test_twice")) - .file("src/test_twice.rs", r#" - #![crate_type = "rlib"] - - #[test] - fn dummy_test() { } - "#); - - p.cargo_process("build"); - - for _ in 0..2 { - assert_that(p.cargo("test"), - execs().with_status(0)); - } -}); - -test!(lib_bin_same_name { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - [[bin]] - name = "foo" - "#) - .file("src/lib.rs", " - #[test] fn lib_test() {} - ") - .file("src/main.rs", " - extern crate foo; - - #[test] - fn bin_test() {} - "); - - assert_that(p.cargo_process("test"), - execs().with_stdout(format!("\ -{} foo v0.0.1 ({}) -{running} target[..]foo-[..] - -running 1 test -test [..] ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]foo-[..] - -running 1 test -test [..] ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", - COMPILING, p.url(), running = RUNNING, doctest = DOCTEST))) -}); - -test!(lib_with_standard_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - /// ``` - /// syntax::foo(); - /// ``` - pub fn foo() {} - - #[test] - fn foo_test() {} - ") - .file("tests/test.rs", " - extern crate syntax; - - #[test] - fn test() { syntax::foo() } - "); - - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} syntax v0.0.1 ({dir}) -{running} target[..]syntax-[..] - -running 1 test -test foo_test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]test-[..] - -running 1 test -test test ... ok - -test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} syntax - -running 1 test -test foo_0 ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, running = RUNNING, - doctest = DOCTEST, dir = p.url()))); -}); - -test!(lib_with_standard_name2 { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - name = "syntax" - test = false - doctest = false - "#) - .file("src/lib.rs", " - pub fn foo() {} - ") - .file("src/main.rs", " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - "); - - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} syntax v0.0.1 ({dir}) -{running} target[..]syntax-[..] - -running 1 test -test test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); -}); - -test!(lib_without_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - "#) - .file("src/lib.rs", " - pub fn foo() {} - ") - .file("src/main.rs", " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - "); - - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} syntax v0.0.1 ({dir}) -{running} target[..]syntax-[..] - -running 1 test -test test ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, dir = p.url()))); -}); - -test!(bin_without_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[bin]] - path = "src/main.rs" - "#) - .file("src/lib.rs", " - pub fn foo() {} - ") - .file("src/main.rs", " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - "); - - assert_that(p.cargo_process("test"), - execs().with_status(101) - .with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - binary target bin.name is required"))); -}); - -test!(bench_without_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[bench]] - path = "src/bench.rs" - "#) - .file("src/lib.rs", " - pub fn foo() {} - ") - .file("src/main.rs", " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ") - .file("src/bench.rs", " - #![feature(test)] - extern crate syntax; - extern crate test; - - #[bench] - fn external_bench(_b: &mut test::Bencher) {} - "); - - assert_that(p.cargo_process("test"), - execs().with_status(101) - .with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - bench target bench.name is required"))); -}); - -test!(test_without_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[test]] - path = "src/test.rs" - "#) - .file("src/lib.rs", r#" - pub fn foo() {} - pub fn get_hello() -> &'static str { "Hello" } - "#) - .file("src/main.rs", " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ") - .file("src/test.rs", r#" - extern crate syntax; - - #[test] - fn external_test() { assert_eq!(syntax::get_hello(), "Hello") } - "#); - - 
assert_that(p.cargo_process("test"), - execs().with_status(101) - .with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - test target test.name is required"))); -}); - -test!(example_without_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[example]] - path = "examples/example.rs" - "#) - .file("src/lib.rs", " - pub fn foo() {} - ") - .file("src/main.rs", " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ") - .file("examples/example.rs", r#" - extern crate syntax; - - fn main() { - println!("example1"); - } - "#); - - assert_that(p.cargo_process("test"), - execs().with_status(101) - .with_stderr(&format!("\ -failed to parse manifest at `[..]` - -Caused by: - example target example.name is required"))); -}); - -test!(bin_there_for_integration { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/main.rs", " - fn main() { std::process::exit(101); } - #[test] fn main_test() {} - ") - .file("tests/foo.rs", r#" - use std::process::Command; - #[test] - fn test_test() { - let status = Command::new("target/debug/foo").status().unwrap(); - assert_eq!(status.code(), Some(101)); - } - "#); - - let output = p.cargo_process("test").arg("-v").exec_with_output().unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - assert!(output.contains("main_test ... ok"), "no main_test\n{}", output); - assert!(output.contains("test_test ... ok"), "no test_test\n{}", output); -}); - -test!(test_dylib { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate_type = ["dylib"] - - [dependencies.bar] - path = "bar" - "#) - .file("src/lib.rs", r#" - extern crate bar as the_bar; - - pub fn bar() { the_bar::baz(); } - - #[test] - fn foo() { bar(); } - "#) - .file("tests/test.rs", r#" - extern crate foo as the_foo; - - #[test] - fn foo() { the_foo::bar(); } - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - crate_type = ["dylib"] - "#) - .file("bar/src/lib.rs", " - pub fn baz() {} - "); - - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} bar v0.0.1 ({dir}) -{compiling} foo v0.0.1 ({dir}) -{running} target[..]foo-[..] - -running 1 test -test foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]test-[..] - -running 1 test -test foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); - p.root().move_into_the_past().unwrap(); - assert_that(p.cargo("test"), - execs().with_status(0) - .with_stdout(format!("\ -{running} target[..]foo-[..] - -running 1 test -test foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]test-[..] - -running 1 test -test foo ... ok - -test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured - -", - running = RUNNING))); - -}); - -test!(test_twice_with_build_cmd { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", " - #[test] - fn foo() {} - "); - - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]foo-[..] - -running 1 test -test foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, running = RUNNING, - doctest = DOCTEST, - dir = p.url()))); - - assert_that(p.cargo("test"), - execs().with_status(0) - .with_stdout(format!("\ -{running} target[..]foo-[..] - -running 1 test -test foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", - running = RUNNING, - doctest = DOCTEST))); -}); - -test!(test_then_build { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - #[test] - fn foo() {} - "); - - assert_that(p.cargo_process("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]foo-[..] - -running 1 test -test foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, running = RUNNING, - doctest = DOCTEST, - dir = p.url()))); - - assert_that(p.cargo("build"), - execs().with_status(0) - .with_stdout("")); -}); - -test!(test_no_run { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", " - #[test] - fn foo() { panic!() } - "); - - assert_that(p.cargo_process("test").arg("--no-run"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -", - compiling = COMPILING, - dir = p.url()))); -}); - -test!(test_run_specific_bin_target { - let prj = project("foo") - .file("Cargo.toml" , r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name="bin1" - path="src/bin1.rs" - - [[bin]] - name="bin2" - path="src/bin2.rs" - "#) - .file("src/bin1.rs", "#[test] fn test1() { }") - .file("src/bin2.rs", "#[test] fn test2() { }"); - - let expected_stdout = format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]bin2-[..] - -running 1 test -test test2 ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, - running = RUNNING, - dir = prj.url()); - - assert_that(prj.cargo_process("test").arg("--bin").arg("bin2"), - execs().with_status(0).with_stdout(&expected_stdout)); -}); - -test!(test_run_specific_test_target { - let prj = project("foo") - .file("Cargo.toml" , r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/bin/a.rs", "fn main() { }") - .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }") - .file("tests/a.rs", "#[test] fn test_a() { }") - .file("tests/b.rs", "#[test] fn test_b() { }"); - - let expected_stdout = format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]b-[..] - -running 1 test -test test_b ... 
ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", - compiling = COMPILING, - running = RUNNING, - dir = prj.url()); - - assert_that(prj.cargo_process("test").arg("--test").arg("b"), - execs().with_status(0).with_stdout(&expected_stdout)); -}); - -test!(test_no_harness { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "foo" - test = false - - [[test]] - name = "bar" - path = "foo.rs" - harness = false - "#) - .file("src/main.rs", "fn main() {}") - .file("foo.rs", "fn main() {}"); - - assert_that(p.cargo_process("test").arg("--").arg("--nocapture"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]bar-[..] -", - compiling = COMPILING, running = RUNNING, - dir = p.url()))); -}); - -test!(selective_testing { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - [dependencies.d2] - path = "d2" - - [lib] - name = "foo" - doctest = false - "#) - .file("src/lib.rs", "") - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [lib] - name = "d1" - doctest = false - "#) - .file("d1/src/lib.rs", "") - .file("d1/src/main.rs", "extern crate d1; fn main() {}") - .file("d2/Cargo.toml", r#" - [package] - name = "d2" - version = "0.0.1" - authors = [] - - [lib] - name = "d2" - doctest = false - "#) - .file("d2/src/lib.rs", "") - .file("d2/src/main.rs", "extern crate d2; fn main() {}"); - p.build(); - - println!("d1"); - assert_that(p.cargo("test").arg("-p").arg("d1"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} d1 v0.0.1 ({dir}) -{running} target[..]d1-[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]d1-[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, - dir = p.url()))); - - println!("d2"); - assert_that(p.cargo("test").arg("-p").arg("d2"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} d2 v0.0.1 ({dir}) -{running} target[..]d2-[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -{running} target[..]d2-[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, - dir = p.url()))); - - println!("whole"); - assert_that(p.cargo("test"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} target[..]foo-[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, - dir = p.url()))); -}); - -test!(almost_cyclic_but_not_quite { - let p = project("a") - .file("Cargo.toml", r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dev-dependencies.b] - path = "b" - [dev-dependencies.c] - path = "c" - "#) - .file("src/lib.rs", r#" - #[cfg(test)] extern crate b; - #[cfg(test)] extern crate c; - "#) - .file("b/Cargo.toml", r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - - [dependencies.a] - path = ".." 
- "#) - .file("b/src/lib.rs", r#" - extern crate a; - "#) - .file("c/Cargo.toml", r#" - [package] - name = "c" - version = "0.0.1" - authors = [] - "#) - .file("c/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - assert_that(p.cargo("test"), - execs().with_status(0)); -}); - -test!(build_then_selective_test { - let p = project("a") - .file("Cargo.toml", r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies.b] - path = "b" - "#) - .file("src/lib.rs", "extern crate b;") - .file("src/main.rs", "extern crate b; extern crate a; fn main() {}") - .file("b/Cargo.toml", r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - "#) - .file("b/src/lib.rs", ""); - - assert_that(p.cargo_process("build"), execs().with_status(0)); - p.root().move_into_the_past().unwrap(); - assert_that(p.cargo("test").arg("-p").arg("b"), - execs().with_status(0)); -}); - -test!(example_dev_dep { - let p = project("foo") - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies.bar] - path = "bar" - "#) - .file("src/lib.rs", r#" - "#) - .file("examples/e1.rs", r#" - extern crate bar; - fn main() { } - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - "#) - .file("bar/src/lib.rs", r#" - // make sure this file takes awhile to compile - macro_rules! f0( () => (1) ); - macro_rules! f1( () => ({(f0!()) + (f0!())}) ); - macro_rules! f2( () => ({(f1!()) + (f1!())}) ); - macro_rules! f3( () => ({(f2!()) + (f2!())}) ); - macro_rules! f4( () => ({(f3!()) + (f3!())}) ); - macro_rules! f5( () => ({(f4!()) + (f4!())}) ); - macro_rules! f6( () => ({(f5!()) + (f5!())}) ); - macro_rules! f7( () => ({(f6!()) + (f6!())}) ); - macro_rules! f8( () => ({(f7!()) + (f7!())}) ); - pub fn bar() { - f8!(); - } - "#); - assert_that(p.cargo_process("test"), - execs().with_status(0)); - assert_that(p.cargo("run") - .arg("--example").arg("e1").arg("--release").arg("-v"), - execs().with_status(0)); -}); - -test!(selective_testing_with_docs { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - "#) - .file("src/lib.rs", r#" - /// ``` - /// not valid rust - /// ``` - pub fn foo() {} - "#) - .file("d1/Cargo.toml", r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [lib] - name = "d1" - path = "d1.rs" - "#) - .file("d1/d1.rs", ""); - p.build(); - - assert_that(p.cargo("test").arg("-p").arg("d1"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} d1 v0.0.1 ({dir}) -{running} target[..]deps[..]d1[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -{doctest} d1 - -running 0 tests - -test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, dir = p.url(), - doctest = DOCTEST))); -}); - -test!(example_bin_same_name { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) - .file("examples/foo.rs", r#"fn main() { println!("example"); }"#); - - assert_that(p.cargo_process("test").arg("--no-run").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ({dir}) -{running} `rustc [..]` -{running} `rustc [..]` -", compiling = COMPILING, running = RUNNING, dir = p.url()))); - - assert_that(&p.bin("foo"), is_not(existing_file())); - assert_that(&p.bin("examples/foo"), existing_file()); - - assert_that(p.process(&p.bin("examples/foo")), - execs().with_status(0).with_stdout("example\n")); - - assert_that(p.cargo("run"), - execs().with_status(0) - .with_stdout(&format!("\ -{compiling} foo v0.0.1 ([..]) -{running} [..] -bin -", compiling = COMPILING, running = RUNNING))); - assert_that(&p.bin("foo"), existing_file()); -}); - -test!(test_with_example_twice { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) - .file("examples/foo.rs", r#"fn main() { println!("example"); }"#); - - println!("first"); - assert_that(p.cargo_process("test").arg("-v"), - execs().with_status(0)); - assert_that(&p.bin("examples/foo"), existing_file()); - println!("second"); - assert_that(p.cargo("test").arg("-v"), - execs().with_status(0)); - assert_that(&p.bin("examples/foo"), existing_file()); -}); - -test!(example_with_dev_dep { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - test = false - doctest = false - - [dev-dependencies.a] - path = "a" - "#) - .file("src/lib.rs", "") - .file("examples/ex.rs", "extern crate a; fn main() {}") - .file("a/Cargo.toml", r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - "#) - .file("a/src/lib.rs", ""); - - assert_that(p.cargo_process("test").arg("-v"), - execs().with_status(0) - .with_stdout(&format!("\ -[..] -[..] -[..] -[..] -{running} `rustc [..] --crate-name ex [..] 
--extern a=[..]` -", running = RUNNING))); -}); - -test!(bin_is_preserved { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}"); - - assert_that(p.cargo_process("build").arg("-v"), - execs().with_status(0)); - assert_that(&p.bin("foo"), existing_file()); - - println!("testing"); - assert_that(p.cargo("test").arg("-v"), - execs().with_status(0)); - assert_that(&p.bin("foo"), existing_file()); -}); - -test!(bad_example { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", ""); - - assert_that(p.cargo_process("run").arg("--example").arg("foo"), - execs().with_status(101).with_stderr("\ -no example target named `foo` -")); - assert_that(p.cargo_process("run").arg("--bin").arg("foo"), - execs().with_status(101).with_stderr("\ -no bin target named `foo` -")); -}); - -test!(doctest_feature { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - [features] - bar = [] - "#) - .file("src/lib.rs", r#" - /// ```rust - /// assert_eq!(foo::foo(), 1); - /// ``` - #[cfg(feature = "bar")] - pub fn foo() -> i32 { 1 } - "#); - - assert_that(p.cargo_process("test").arg("--features").arg("bar"), - execs().with_status(0).with_stdout(format!("\ -{compiling} foo [..] -{running} target[..]foo[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 1 test -test foo_0 ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, doctest = DOCTEST))) -}); - -test!(dashes_to_underscores { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo-bar" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - /// ``` - /// assert_eq!(foo_bar::foo(), 1); - /// ``` - pub fn foo() -> i32 { 1 } - "#); - - assert_that(p.cargo_process("test").arg("-v"), - execs().with_status(0)); -}); - -test!(doctest_dev_dep { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - b = { path = "b" } - "#) - .file("src/lib.rs", r#" - /// ``` - /// extern crate b; - /// ``` - pub fn foo() {} - "#) - .file("b/Cargo.toml", r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - "#) - .file("b/src/lib.rs", ""); - - assert_that(p.cargo_process("test").arg("-v"), - execs().with_status(0)); -}); - -test!(filter_no_doc_tests { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#) - .file("src/lib.rs", r#" - /// ``` - /// extern crate b; - /// ``` - pub fn foo() {} - "#) - .file("tests/foo.rs", ""); - - assert_that(p.cargo_process("test").arg("--test=foo"), - execs().with_stdout(format!("\ -{compiling} foo v0.0.1 ([..]) -{running} target[..]debug[..]foo[..] - -running 0 tests - -test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING))); -}); - -test!(dylib_doctest { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate-type = ["rlib", "dylib"] - test = false - "#) - .file("src/lib.rs", r#" - /// ``` - /// foo::foo(); - /// ``` - pub fn foo() {} - "#); - - assert_that(p.cargo_process("test"), - execs().with_stdout(format!("\ -{compiling} foo v0.0.1 ([..]) -{doctest} foo - -running 1 test -test foo_0 ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, doctest = DOCTEST))); -}); - -test!(dylib_doctest2 { - // can't doctest dylibs as they're statically linked together - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate-type = ["dylib"] - test = false - "#) - .file("src/lib.rs", r#" - /// ``` - /// foo::foo(); - /// ``` - pub fn foo() {} - "#); - - assert_that(p.cargo_process("test"), - execs().with_stdout("")); -}); - -test!(cyclic_dev_dep_doc_test { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - bar = { path = "bar" } - "#) - .file("src/lib.rs", r#" - //! ``` - //! extern crate bar; - //! ``` - "#) - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - foo = { path = ".." } - "#) - .file("bar/src/lib.rs", r#" - extern crate foo; - "#); - assert_that(p.cargo_process("test"), - execs().with_stdout(format!("\ -{compiling} foo v0.0.1 ([..]) -{compiling} bar v0.0.1 ([..]) -{running} target[..]foo[..] - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - -{doctest} foo - -running 1 test -test _0 ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured - -", compiling = COMPILING, running = RUNNING, doctest = DOCTEST))) -}); - -test!(dev_dep_with_build_script { - let p = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - bar = { path = "bar" } - "#) - .file("src/lib.rs", "") - .file("examples/foo.rs", "fn main() {}") - .file("bar/Cargo.toml", r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = "build.rs" - "#) - .file("bar/src/lib.rs", "") - .file("bar/build.rs", "fn main() {}"); - assert_that(p.cargo_process("test"), - execs().with_status(0)); -}); diff --git a/tests/test_cargo_tool_paths.rs b/tests/test_cargo_tool_paths.rs deleted file mode 100644 index 4c5d3c0ee1b..00000000000 --- a/tests/test_cargo_tool_paths.rs +++ /dev/null @@ -1,120 +0,0 @@ -use support::{path2url, project, execs}; -use support::{COMPILING, RUNNING}; -use hamcrest::assert_that; - -fn setup() { -} - -test!(pathless_tools { - let target = ::rustc_host(); - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - "#) - .file("src/lib.rs", "") - .file(".cargo/config", &format!(r#" - [target.{}] - ar = "nonexistent-ar" - linker = "nonexistent-linker" - "#, target)); - - assert_that(foo.cargo_process("build").arg("--verbose"), - execs().with_stdout(&format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc [..] 
-C ar=nonexistent-ar -C linker=nonexistent-linker [..]` -", compiling = COMPILING, running = RUNNING, url = foo.url()))) -}); - -test!(absolute_tools { - let target = ::rustc_host(); - - // Escaped as they appear within a TOML config file - let config = if cfg!(windows) { - (r#"C:\\bogus\\nonexistent-ar"#, r#"C:\\bogus\\nonexistent-linker"#) - } else { - (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) - }; - - let foo = project("foo") - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - "#) - .file("src/lib.rs", "") - .file(".cargo/config", &format!(r#" - [target.{target}] - ar = "{ar}" - linker = "{linker}" - "#, target = target, ar = config.0, linker = config.1)); - - let output = if cfg!(windows) { - (r#"C:\bogus\nonexistent-ar"#, r#"C:\bogus\nonexistent-linker"#) - } else { - (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) - }; - - assert_that(foo.cargo_process("build").arg("--verbose"), - execs().with_stdout(&format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc [..] -C ar={ar} -C linker={linker} [..]` -", compiling = COMPILING, running = RUNNING, url = foo.url(), ar = output.0, linker = output.1))) -}); - -test!(relative_tools { - let target = ::rustc_host(); - - // Escaped as they appear within a TOML config file - let config = if cfg!(windows) { - (r#".\\nonexistent-ar"#, r#".\\tools\\nonexistent-linker"#) - } else { - (r#"./nonexistent-ar"#, r#"./tools/nonexistent-linker"#) - }; - - // Funky directory structure to test that relative tool paths are made absolute - // by reference to the `.cargo/..` directory and not to (for example) the CWD. - let origin = project("origin") - .file("foo/Cargo.toml", r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - "#) - .file("foo/src/lib.rs", "") - .file(".cargo/config", &format!(r#" - [target.{target}] - ar = "{ar}" - linker = "{linker}" - "#, target = target, ar = config.0, linker = config.1)); - - let foo_path = origin.root().join("foo"); - let foo_url = path2url(foo_path.clone()); - let prefix = origin.root().into_os_string().into_string().unwrap(); - let output = if cfg!(windows) { - (format!(r#"{}\.\nonexistent-ar"#, prefix), - format!(r#"{}\.\tools\nonexistent-linker"#, prefix)) - } else { - (format!(r#"{}/./nonexistent-ar"#, prefix), - format!(r#"{}/./tools/nonexistent-linker"#, prefix)) - }; - - assert_that(origin.cargo_process("build").cwd(foo_path).arg("--verbose"), - execs().with_stdout(&format!("\ -{compiling} foo v0.0.1 ({url}) -{running} `rustc [..] 
-C ar={ar} -C linker={linker} [..]`
-", compiling = COMPILING, running = RUNNING, url = foo_url, ar = output.0, linker = output.1)))
-});
diff --git a/tests/test_cargo_verify_project.rs b/tests/test_cargo_verify_project.rs
deleted file mode 100644
index ca75952271d..00000000000
--- a/tests/test_cargo_verify_project.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-use support::{project, execs, main_file, basic_bin_manifest};
-use hamcrest::{assert_that};
-
-fn setup() {}
-
-fn verify_project_success_output() -> String {
-    r#"{"success":"true"}"#.into()
-}
-
-test!(cargo_verify_project_path_to_cargo_toml_relative {
-    let p = project("foo")
-        .file("Cargo.toml", &basic_bin_manifest("foo"))
-        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]));
-
-    assert_that(p.cargo_process("verify-project")
-                 .arg("--manifest-path").arg("foo/Cargo.toml")
-                 .cwd(p.root().parent().unwrap()),
-                execs().with_status(0)
-                       .with_stdout(verify_project_success_output()));
-});
-
-test!(cargo_verify_project_path_to_cargo_toml_absolute {
-    let p = project("foo")
-        .file("Cargo.toml", &basic_bin_manifest("foo"))
-        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]));
-
-    assert_that(p.cargo_process("verify-project")
-                 .arg("--manifest-path").arg(p.root().join("Cargo.toml"))
-                 .cwd(p.root().parent().unwrap()),
-                execs().with_status(0)
-                       .with_stdout(verify_project_success_output()));
-});
-
-test!(cargo_verify_project_cwd {
-    let p = project("foo")
-        .file("Cargo.toml", &basic_bin_manifest("foo"))
-        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]));
-
-    assert_that(p.cargo_process("verify-project")
-                 .cwd(p.root()),
-                execs().with_status(0)
-                       .with_stdout(verify_project_success_output()));
-});
diff --git a/tests/test_cargo_version.rs b/tests/test_cargo_version.rs
deleted file mode 100644
index 0e19bbeeabf..00000000000
--- a/tests/test_cargo_version.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-use support::{project, execs};
-use hamcrest::assert_that;
-use cargo;
-
-fn setup() {}
-
-test!(simple {
-    let p = project("foo");
-
-    assert_that(p.cargo_process("version"),
-                execs().with_status(0).with_stdout(&format!("{}\n",
-                    cargo::version())));
-
-    assert_that(p.cargo_process("--version"),
-                execs().with_status(0).with_stdout(&format!("{}\n",
-                    cargo::version())));
-
-});
diff --git a/tests/test_shell.rs b/tests/test_shell.rs
deleted file mode 100644
index a2a5e56388b..00000000000
--- a/tests/test_shell.rs
+++ /dev/null
@@ -1,97 +0,0 @@
-use std::io::prelude::*;
-use std::io;
-use std::sync::{Arc, Mutex};
-use term::{Terminal, TerminfoTerminal, color};
-use hamcrest::{assert_that};
-
-use cargo::core::shell::{Shell, ShellConfig};
-use cargo::core::shell::ColorConfig::{Auto,Always, Never};
-use cargo::util::process;
-
-use support::{Tap, cargo_dir, execs, shell_writes};
-
-fn setup() {
-}
-
-struct Sink(Arc<Mutex<Vec<u8>>>);
-
-impl Write for Sink {
-    fn write(&mut self, data: &[u8]) -> io::Result<usize> {
-        Write::write(&mut *self.0.lock().unwrap(), data)
-    }
-    fn flush(&mut self) -> io::Result<()> { Ok(()) }
-}
-
-test!(non_tty {
-    let config = ShellConfig { color_config: Auto, tty: false };
-    let a = Arc::new(Mutex::new(Vec::new()));
-
-    Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| {
-        shell.say("Hey Alex", color::RED).unwrap();
-    });
-    let buf = a.lock().unwrap().clone();
-    assert_that(&buf[..], shell_writes("Hey Alex\n"));
-});
-
-test!(color_explicitly_disabled {
-    let term = TerminfoTerminal::new(Vec::new());
-    if term.is_none() { return }
-
-    let config = ShellConfig { color_config: Never, tty: true };
-    let a = Arc::new(Mutex::new(Vec::new()));
-
-    Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| {
-        shell.say("Hey Alex", color::RED).unwrap();
-    });
-    let buf = a.lock().unwrap().clone();
-    assert_that(&buf[..], shell_writes("Hey Alex\n"));
-});
-
-test!(colored_shell {
-    let term = TerminfoTerminal::new(Vec::new());
-    if term.is_none() { return }
-
-    let config = ShellConfig { color_config: Auto, tty: true };
-    let a = Arc::new(Mutex::new(Vec::new()));
-
-    Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| {
-        shell.say("Hey Alex", color::RED).unwrap();
-    });
-    let buf = a.lock().unwrap().clone();
-    assert_that(&buf[..],
-                shell_writes(colored_output("Hey Alex\n",
-                                            color::RED).unwrap()));
-});
-
-test!(color_explicitly_enabled {
-    let term = TerminfoTerminal::new(Vec::new());
-    if term.is_none() { return }
-
-    let config = ShellConfig { color_config: Always, tty: false };
-    let a = Arc::new(Mutex::new(Vec::new()));
-
-    Shell::create(Box::new(Sink(a.clone())), config).tap(|shell| {
-        shell.say("Hey Alex", color::RED).unwrap();
-    });
-    let buf = a.lock().unwrap().clone();
-    assert_that(&buf[..],
-                shell_writes(colored_output("Hey Alex\n",
-                                            color::RED).unwrap()));
-});
-
-test!(no_term {
-    // Verify that shell creation is successful when $TERM does not exist.
-    assert_that(process(&cargo_dir().join("cargo")).unwrap()
-                       .env_remove("TERM"),
-                execs().with_stderr(""));
-});
-
-fn colored_output(string: &str, color: color::Color) -> io::Result<String> {
-    let mut term = TerminfoTerminal::new(Vec::new()).unwrap();
-    try!(term.reset());
-    try!(term.fg(color));
-    try!(write!(&mut term, "{}", string));
-    try!(term.reset());
-    try!(term.flush());
-    Ok(String::from_utf8_lossy(term.get_ref()).to_string())
-}
diff --git a/tests/tests.rs b/tests/tests.rs
deleted file mode 100644
index fb3a124746f..00000000000
--- a/tests/tests.rs
+++ /dev/null
@@ -1,81 +0,0 @@
-extern crate bufstream;
-extern crate cargo;
-extern crate filetime;
-extern crate flate2;
-extern crate git2;
-extern crate hamcrest;
-extern crate libc;
-extern crate rustc_serialize;
-extern crate tar;
-extern crate tempdir;
-extern crate term;
-extern crate url;
-#[cfg(windows)] extern crate kernel32;
-#[cfg(windows)] extern crate winapi;
-
-#[macro_use]
-extern crate log;
-
-use cargo::util::Rustc;
-
-mod support;
-macro_rules! 
test { - ($name:ident $expr:expr) => ( - #[test] - fn $name() { - ::support::paths::setup(); - setup(); - $expr; - } - ) -} - -mod test_bad_config; -mod test_bad_manifest_path; -mod test_cargo; -mod test_cargo_bench; -mod test_cargo_build_auth; -mod test_cargo_build_lib; -mod test_cargo_clean; -mod test_cargo_compile; -mod test_cargo_compile_custom_build; -mod test_cargo_compile_git_deps; -mod test_cargo_compile_path_deps; -mod test_cargo_compile_plugins; -mod test_cargo_cross_compile; -mod test_cargo_doc; -mod test_cargo_features; -mod test_cargo_fetch; -mod test_cargo_freshness; -mod test_cargo_generate_lockfile; -mod test_cargo_new; -mod test_cargo_package; -mod test_cargo_profiles; -mod test_cargo_publish; -mod test_cargo_read_manifest; -mod test_cargo_registry; -mod test_cargo_run; -mod test_cargo_rustc; -mod test_cargo_search; -mod test_cargo_test; -mod test_cargo_tool_paths; -mod test_cargo_verify_project; -mod test_cargo_version; -mod test_shell; - -thread_local!(static RUSTC: Rustc = Rustc::new("rustc").unwrap()); - -fn rustc_host() -> String { - RUSTC.with(|r| r.host.clone()) -} - -fn is_nightly() -> bool { - RUSTC.with(|r| { - r.verbose_version.contains("-nightly") || - r.verbose_version.contains("-dev") - }) -} - -fn can_panic() -> bool { - RUSTC.with(|r| !r.host.contains("msvc")) -} diff --git a/tests/testsuite/alt_registry.rs b/tests/testsuite/alt_registry.rs new file mode 100644 index 00000000000..edeb67a6b7d --- /dev/null +++ b/tests/testsuite/alt_registry.rs @@ -0,0 +1,1318 @@ +use crate::support::publish::validate_alt_upload; +use crate::support::registry::{self, Package}; +use crate::support::{basic_manifest, git, paths, project}; +use cargo::util::IntoUrl; +use std::fs::{self, File}; +use std::io::Write; + +#[cargo_test] +fn depend_on_alt_registry() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry::alt_registry_path().to_str().unwrap() + )) + .run(); + + p.cargo("clean").run(); + + // Don't download a second time + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn depend_on_alt_registry_depends_on_same_registry_no_index() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").alternative(true).publish(); + Package::new("bar", "0.0.1") + .registry_dep("baz", "0.0.1") + .alternative(true) + .publish(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[DOWNLOADED] [..] 
v0.0.1 (registry `[ROOT][..]`) +[COMPILING] baz v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry::alt_registry_path().to_str().unwrap() + )) + .run(); +} + +#[cargo_test] +fn depend_on_alt_registry_depends_on_same_registry() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").alternative(true).publish(); + Package::new("bar", "0.0.1") + .registry_dep("baz", "0.0.1") + .alternative(true) + .publish(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[COMPILING] baz v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry::alt_registry_path().to_str().unwrap() + )) + .run(); +} + +#[cargo_test] +fn depend_on_alt_registry_depends_on_crates_io() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1") + .dep("baz", "0.0.1") + .alternative(true) + .publish(); + + p.cargo("build") + .with_stderr_unordered(&format!( + "\ +[UPDATING] `{alt_reg}` index +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] baz v0.0.1 (registry `[ROOT][..]`) +[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + alt_reg = registry::alt_registry_path().to_str().unwrap(), + reg = registry::registry_path().to_str().unwrap() + )) + .run(); +} + +#[cargo_test] +fn registry_and_path_dep_works() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn registry_incompatible_with_git() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + " dependency (bar) specification is ambiguous. 
\ + Only one of `git` or `registry` is allowed.", + ) + .run(); +} + +#[cargo_test] +fn cannot_publish_to_crates_io_with_registry_dependency() { + let fakeio_path = paths::root().join("fake.io"); + let fakeio_url = fakeio_path.into_url().unwrap(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + &format!( + r#" + [registries.fakeio] + index = "{}" + "#, + fakeio_url + ), + ) + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + // Since this can't really call plain `publish` without fetching the real + // crates.io index, create a fake one that points to the real crates.io. + git::repo(&fakeio_path) + .file( + "config.json", + r#" + {"dl": "https://crates.io/api/v1/crates", "api": "https://crates.io"} + "#, + ) + .build(); + + // Login so that we have the token available + p.cargo("login --registry fakeio TOKEN").run(); + + p.cargo("publish --registry fakeio") + .with_status(101) + .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") + .run(); + + p.cargo("publish --index") + .arg(fakeio_url.to_string()) + .with_status(101) + .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") + .run(); +} + +#[cargo_test] +fn publish_with_registry_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + // Login so that we have the token available + p.cargo("login --registry alternative TOKEN").run(); + + p.cargo("publish --registry alternative").run(); + + validate_alt_upload( + r#"{ + "authors": [], + "badges": {}, + "categories": [], + "deps": [ + { + "default_features": true, + "features": [], + "kind": "normal", + "name": "bar", + "optional": false, + "target": null, + "version_req": "^0.0.1" + } + ], + "description": null, + "documentation": null, + "features": {}, + "homepage": null, + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "name": "foo", + "readme": null, + "readme_file": null, + "repository": null, + "vers": "0.0.1" + }"#, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + ); +} + +#[cargo_test] +fn alt_registry_and_crates_io_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + crates_io_dep = "0.0.1" + + [dependencies.alt_reg_dep] + version = "0.1.0" + registry = "alternative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("crates_io_dep", "0.0.1").publish(); + Package::new("alt_reg_dep", "0.1.0") + .alternative(true) + .publish(); + + p.cargo("build") + .with_stderr_contains(format!( + "[UPDATING] `{}` index", + registry::alt_registry_path().to_str().unwrap() + )) + .with_stderr_contains(&format!( + "[UPDATING] `{}` index", + registry::registry_path().to_str().unwrap() + )) + .with_stderr_contains("[DOWNLOADED] crates_io_dep v0.0.1 (registry `[ROOT][..]`)") + .with_stderr_contains("[DOWNLOADED] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)") + .with_stderr_contains("[COMPILING] alt_reg_dep v0.1.0 (registry `[ROOT][..]`)") + .with_stderr_contains("[COMPILING] 
crates_io_dep v0.0.1") + .with_stderr_contains("[COMPILING] foo v0.0.1 ([CWD])") + .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s") + .run(); +} + +#[cargo_test] +fn block_publish_due_to_no_token() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + // Setup the registry by publishing a package + Package::new("bar", "0.0.1").alternative(true).publish(); + + fs::remove_file(paths::home().join(".cargo/credentials")).unwrap(); + + // Now perform the actual publish + p.cargo("publish --registry alternative") + .with_status(101) + .with_stderr_contains("error: no upload token found, please run `cargo login`") + .run(); +} + +#[cargo_test] +fn publish_to_alt_registry() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + // Setup the registry by publishing a package + Package::new("bar", "0.0.1").alternative(true).publish(); + + // Login so that we have the token available + p.cargo("login --registry alternative TOKEN").run(); + + // Now perform the actual publish + p.cargo("publish --registry alternative").run(); + + validate_alt_upload( + r#"{ + "authors": [], + "badges": {}, + "categories": [], + "deps": [], + "description": null, + "documentation": null, + "features": {}, + "homepage": null, + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "name": "foo", + "readme": null, + "readme_file": null, + "repository": null, + "vers": "0.0.1" + }"#, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + ); +} + +#[cargo_test] +fn publish_with_crates_io_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = ["me"] + license = "MIT" + description = "foo" + + [dependencies.bar] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + // Login so that we have the token available + p.cargo("login --registry alternative TOKEN").run(); + + p.cargo("publish --registry alternative").run(); + + validate_alt_upload( + r#"{ + "authors": ["me"], + "badges": {}, + "categories": [], + "deps": [ + { + "default_features": true, + "features": [], + "kind": "normal", + "name": "bar", + "optional": false, + "registry": "https://github.com/rust-lang/crates.io-index", + "target": null, + "version_req": "^0.0.1" + } + ], + "description": "foo", + "documentation": null, + "features": {}, + "homepage": null, + "keywords": [], + "license": "MIT", + "license_file": null, + "links": null, + "name": "foo", + "readme": null, + "readme_file": null, + "repository": null, + "vers": "0.0.1" + }"#, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + ); +} + +#[cargo_test] +fn passwords_in_registry_index_url_forbidden() { + registry::init(); + + let config = paths::home().join(".cargo/config"); + + File::create(config) + .unwrap() + .write_all( + br#" + [registry] + index = "ssh://git:secret@foobar.com" + "#, + ) + .unwrap(); + + let p = project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("publish") + .with_status(101) + .with_stderr_contains("error: Registry URLs may not contain passwords") + .run(); +} + +#[cargo_test] +fn passwords_in_registries_index_url_forbidden() { + registry::init(); + + let config = paths::home().join(".cargo/config"); + + File::create(config) + .unwrap() + .write_all( + br#" + [registries.alternative] + index = "ssh://git:secret@foobar.com" + "#, + ) + .unwrap(); + + let p = 
project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("publish --registry alternative") + .with_status(101) + .with_stderr_contains("error: Registry URLs may not contain passwords") + .run(); +} + +#[cargo_test] +fn patch_alt_reg() { + Package::new("bar", "0.1.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { version = "0.1.0", registry = "alternative" } + + [patch.alternative] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate bar; + pub fn f() { bar::bar(); } + ", + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[COMPILING] bar v0.1.0 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn bad_registry_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "bad name" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[CWD]/Cargo.toml` + +Caused by: + Invalid character ` ` in registry name: `bad name`", + ) + .run(); + + for cmd in &[ + "init", + "install foo", + "login", + "owner", + "publish", + "search", + "yank", + ] { + p.cargo(cmd) + .arg("--registry") + .arg("bad name") + .with_status(101) + .with_stderr("[ERROR] Invalid character ` ` in registry name: `bad name`") + .run(); + } +} + +#[cargo_test] +fn no_api() { + Package::new("bar", "0.0.1").alternative(true).publish(); + // Configure without `api`. + let repo = git2::Repository::open(registry::alt_registry_path()).unwrap(); + let cfg_path = registry::alt_registry_path().join("config.json"); + fs::write( + cfg_path, + format!(r#"{{"dl": "{}"}}"#, registry::alt_dl_url()), + ) + .unwrap(); + git::add(&repo); + git::commit(&repo); + + // First check that a dependency works. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies.bar] + version = "0.0.1" + registry = "alternative" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry::alt_registry_path().to_str().unwrap() + )) + .run(); + + // Check all of the API commands. 
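+    // (The config.json written above has a `dl` key but no `api` key, so
+    // login, publish, search, owner, and yank should each fail with the
+    // same "does not support API commands" error asserted below.)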
+ let err = format!( + "[ERROR] registry `{}` does not support API commands", + registry::alt_registry_path().display() + ); + + p.cargo("login --registry alternative TOKEN") + .with_status(101) + .with_stderr_contains(&err) + .run(); + + p.cargo("publish --registry alternative") + .with_status(101) + .with_stderr_contains(&err) + .run(); + + p.cargo("search --registry alternative") + .with_status(101) + .with_stderr_contains(&err) + .run(); + + p.cargo("owner --registry alternative --list") + .with_status(101) + .with_stderr_contains(&err) + .run(); + + p.cargo("yank --registry alternative --vers=0.0.1 bar") + .with_status(101) + .with_stderr_contains(&err) + .run(); + + p.cargo("yank --registry alternative --vers=0.0.1 bar") + .with_stderr_contains(&err) + .with_status(101) + .run(); +} + +#[cargo_test] +fn alt_reg_metadata() { + // Check for "registry" entries in `cargo metadata` with alternative registries. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + altdep = { version = "0.0.1", registry = "alternative" } + iodep = { version = "0.0.1" } + "#, + ) + .file("src/lib.rs", "") + .build(); + + Package::new("bar", "0.0.1").publish(); + Package::new("altdep", "0.0.1") + .dep("bar", "0.0.1") + .alternative(true) + .publish(); + Package::new("altdep2", "0.0.1").alternative(true).publish(); + Package::new("iodep", "0.0.1") + .registry_dep("altdep2", "0.0.1") + .publish(); + + // The important thing to check here is the "registry" value in `deps`. + // They should be: + // foo -> altdep: alternative-registry + // foo -> iodep: null (because it is in crates.io) + // altdep -> bar: null (because it is in crates.io) + // iodep -> altdep2: alternative-registry + p.cargo("metadata --format-version=1 --no-deps") + .with_json( + r#" + { + "packages": [ + { + "name": "foo", + "version": "0.0.1", + "id": "foo 0.0.1 (path+file:[..]/foo)", + "license": null, + "license_file": null, + "description": null, + "source": null, + "dependencies": [ + { + "name": "altdep", + "source": "registry+file:[..]/alternative-registry", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": "file:[..]/alternative-registry" + }, + { + "name": "iodep", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": null + } + ], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]/foo/Cargo.toml", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + } + ], + "workspace_members": [ + "foo 0.0.1 (path+file:[..]/foo)" + ], + "resolve": null, + "target_directory": "[..]/foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); + + // --no-deps uses a different code path, make sure both work. 
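+    // The full invocation also resolves and downloads the dependency graph,
+    // so the expected JSON below additionally attributes altdep and altdep2
+    // to the `registry+file:[..]/alternative-registry` source, while bar and
+    // iodep come from crates.io.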
+ p.cargo("metadata --format-version=1") + .with_json( + r#" + { + "packages": [ + { + "name": "altdep2", + "version": "0.0.1", + "id": "altdep2 0.0.1 (registry+file:[..]/alternative-registry)", + "license": null, + "license_file": null, + "description": null, + "source": "registry+file:[..]/alternative-registry", + "dependencies": [], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]/altdep2-0.0.1/Cargo.toml", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + }, + { + "name": "altdep", + "version": "0.0.1", + "id": "altdep 0.0.1 (registry+file:[..]/alternative-registry)", + "license": null, + "license_file": null, + "description": null, + "source": "registry+file:[..]/alternative-registry", + "dependencies": [ + { + "name": "bar", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": null + } + ], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]/altdep-0.0.1/Cargo.toml", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + }, + { + "name": "foo", + "version": "0.0.1", + "id": "foo 0.0.1 (path+file:[..]/foo)", + "license": null, + "license_file": null, + "description": null, + "source": null, + "dependencies": [ + { + "name": "altdep", + "source": "registry+file:[..]/alternative-registry", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": "file:[..]/alternative-registry" + }, + { + "name": "iodep", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": null + } + ], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]/foo/Cargo.toml", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + }, + { + "name": "iodep", + "version": "0.0.1", + "id": "iodep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "license": null, + "license_file": null, + "description": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "dependencies": [ + { + "name": "altdep2", + "source": "registry+file:[..]/alternative-registry", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": "file:[..]/alternative-registry" + } + ], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]/iodep-0.0.1/Cargo.toml", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + }, + { + "name": "bar", + "version": "0.0.1", + "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "license": null, + "license_file": null, + "description": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "dependencies": [], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]/bar-0.0.1/Cargo.toml", + "metadata": null, + "authors": [], + "categories": [], + "keywords": 
[], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + } + ], + "workspace_members": [ + "foo 0.0.1 (path+file:[..]/foo)" + ], + "resolve": "{...}", + "target_directory": "[..]/foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn unknown_registry() { + // A known registry refers to an unknown registry. + // foo -> bar(crates.io) -> baz(alt) + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").alternative(true).publish(); + Package::new("bar", "0.0.1") + .registry_dep("baz", "0.0.1") + .publish(); + + // Remove "alternative" from config. + let cfg_path = paths::home().join(".cargo/config"); + let mut config = fs::read_to_string(&cfg_path).unwrap(); + let start = config.find("[registries.alternative]").unwrap(); + config.insert(start, '#'); + let start_index = &config[start..].find("index =").unwrap(); + config.insert(start + start_index, '#'); + fs::write(&cfg_path, config).unwrap(); + + p.cargo("build").run(); + + // Important parts: + // foo -> bar registry = null + // bar -> baz registry = alternate + p.cargo("metadata --format-version=1") + .with_json( + r#" + { + "packages": [ + { + "name": "baz", + "version": "0.0.1", + "id": "baz 0.0.1 (registry+file://[..]/alternative-registry)", + "license": null, + "license_file": null, + "description": null, + "source": "registry+file://[..]/alternative-registry", + "dependencies": [], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + }, + { + "name": "foo", + "version": "0.0.1", + "id": "foo 0.0.1 (path+file://[..]/foo)", + "license": null, + "license_file": null, + "description": null, + "source": null, + "dependencies": [ + { + "name": "bar", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": null + } + ], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]/foo/Cargo.toml", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + }, + { + "name": "bar", + "version": "0.0.1", + "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "license": null, + "license_file": null, + "description": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "dependencies": [ + { + "name": "baz", + "source": "registry+file://[..]/alternative-registry", + "req": "^0.0.1", + "kind": null, + "rename": null, + "optional": false, + "uses_default_features": true, + "features": [], + "target": null, + "registry": "file:[..]/alternative-registry" + } + ], + "targets": "{...}", + "features": {}, + "manifest_path": "[..]", + "metadata": null, + "authors": [], + "categories": [], + "keywords": [], + "readme": null, + "repository": null, + "edition": "2015", + "links": null + } + ], + "workspace_members": [ + "foo 0.0.1 (path+file://[..]/foo)" + ], + "resolve": "{...}", + "target_directory": "[..]/foo/target", + "version": 1, + "workspace_root": "[..]/foo" + } + "#, + ) + .run(); +} + +#[cargo_test] +fn 
registries_index_relative_url() { + let config = paths::root().join(".cargo/config"); + fs::create_dir_all(config.parent().unwrap()).unwrap(); + File::create(&config) + .unwrap() + .write_all( + br#" + [registries.relative] + index = "file:alternative-registry" + "#, + ) + .unwrap(); + + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "relative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry::alt_registry_path().to_str().unwrap() + )) + .run(); +} + +#[cargo_test] +fn registry_index_relative_url() { + let config = paths::root().join(".cargo/config"); + fs::create_dir_all(config.parent().unwrap()).unwrap(); + File::create(&config) + .unwrap() + .write_all( + br#" + [registry] + index = "file:alternative-registry" + "#, + ) + .unwrap(); + + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + fs::remove_file(paths::home().join(".cargo/config")).unwrap(); + + p.cargo("build") + .with_stderr(&format!( + "\ +warning: custom registry support via the `registry.index` configuration is being removed, this functionality will not work in the future +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... 
+[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry::alt_registry_path().to_str().unwrap() + )) + .run(); +} + +#[cargo_test] +fn registries_index_relative_path_not_allowed() { + let config = paths::root().join(".cargo/config"); + fs::create_dir_all(config.parent().unwrap()).unwrap(); + File::create(&config) + .unwrap() + .write_all( + br#" + [registries.relative] + index = "alternative-registry" + "#, + ) + .unwrap(); + + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "0.0.1" + registry = "relative" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").alternative(true).publish(); + + p.cargo("build") + .with_stderr(&format!( + "\ +error: failed to parse manifest at `{root}/foo/Cargo.toml` + +Caused by: + invalid url `alternative-registry`: relative URL without a base +", + root = paths::root().to_str().unwrap() + )) + .with_status(101) + .run(); +} diff --git a/tests/testsuite/bad_config.rs b/tests/testsuite/bad_config.rs new file mode 100644 index 00000000000..e6dd6610ece --- /dev/null +++ b/tests/testsuite/bad_config.rs @@ -0,0 +1,1303 @@ +use crate::support::registry::Package; +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn bad1() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [target] + nonexistent-target = "foo" + "#, + ) + .build(); + p.cargo("build -v --target=nonexistent-target") + .with_status(101) + .with_stderr( + "\ +[ERROR] expected table for configuration key `target.nonexistent-target`, \ +but found string in [..]config +", + ) + .run(); +} + +#[cargo_test] +fn bad2() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [http] + proxy = 3.0 + "#, + ) + .build(); + p.cargo("publish -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] could not load Cargo configuration + +Caused by: + failed to load TOML configuration from `[..]config` + +Caused by: + failed to parse key `http` + +Caused by: + failed to parse key `proxy` + +Caused by: + found TOML configuration value of unknown type `float` +", + ) + .run(); +} + +#[cargo_test] +fn bad3() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [http] + proxy = true + "#, + ) + .build(); + Package::new("foo", "1.0.0").publish(); + + p.cargo("publish -v") + .with_status(101) + .with_stderr( + "\ +error: failed to update registry [..] + +Caused by: + error in [..]config: `http.proxy` expected a string, but found a boolean +", + ) + .run(); +} + +#[cargo_test] +fn bad4() { + let p = project() + .file( + ".cargo/config", + r#" + [cargo-new] + name = false + "#, + ) + .build(); + p.cargo("new -v foo") + .with_status(101) + .with_stderr( + "\ +[ERROR] Failed to create package `foo` at `[..]` + +Caused by: + error in [..]config: `cargo-new.name` expected a string, but found a boolean +", + ) + .run(); +} + +#[cargo_test] +fn bad6() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [http] + user-agent = true + "#, + ) + .build(); + Package::new("foo", "1.0.0").publish(); + + p.cargo("publish -v") + .with_status(101) + .with_stderr( + "\ +error: failed to update registry [..] 
+ +Caused by: + error in [..]config: `http.user-agent` expected a string, but found a boolean +", + ) + .run(); +} + +#[cargo_test] +fn bad_cargo_config_jobs() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + jobs = -1 + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] error in [..].cargo/config: \ +could not load config key `build.jobs`: \ +invalid value: integer `-1`, expected u32 +", + ) + .run(); +} + +#[cargo_test] +fn default_cargo_config_jobs() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + jobs = 1 + "#, + ) + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn good_cargo_config_jobs() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + jobs = 4 + "#, + ) + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn invalid_global_config() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file(".cargo/config", "4") + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] could not load Cargo configuration + +Caused by: + could not parse TOML configuration in `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected an equals, found eof at line 1 +", + ) + .run(); +} + +#[cargo_test] +fn bad_cargo_lock() { + let p = project() + .file("Cargo.lock", "[[package]]\nfoo = 92") + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse lock file at: [..]Cargo.lock + +Caused by: + missing field `name` for key `package` +", + ) + .run(); +} + +#[cargo_test] +fn duplicate_packages_in_cargo_lock() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.0.1" + dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "bar" + version = "0.1.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + + [[package]] + name = "bar" + version = "0.1.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse lock file at: [..] + +Caused by: + package `bar` is specified twice in the lockfile +", + ) + .run(); +} + +#[cargo_test] +fn bad_source_in_cargo_lock() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.0.1" + dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "bar" + version = "0.1.0" + source = "You shall not parse" + "#, + ) + .build(); + + p.cargo("build --verbose") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse lock file at: [..] 
+ +Caused by: + invalid source `You shall not parse` for key `package.source` +", + ) + .run(); +} + +#[cargo_test] +fn bad_dependency_in_lockfile() { + let p = project() + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.0.1" + dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + "#, + ) + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn bad_git_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + foo = { git = "file:.." } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[UPDATING] git repository `file:///` +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update file:/// + +Caused by: + failed to clone into: [..] + +Caused by: + [..]'file:///' is not a valid local file URI[..] +", + ) + .run(); +} + +#[cargo_test] +fn bad_crate_type() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + crate-type = ["bad_type", "rlib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "error: failed to run `rustc` to learn about crate-type bad_type information", + ) + .run(); +} + +#[cargo_test] +fn malformed_override() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [target.x86_64-apple-darwin.freetype] + native = { + foo: "bar" + } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected a table key, found a newline at line 8 +", + ) + .run(); +} + +#[cargo_test] +fn duplicate_binary_names() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[bin]] + name = "e" + path = "a.rs" + + [[bin]] + name = "e" + path = "b.rs" + "#, + ) + .file("a.rs", r#"fn main() -> () {}"#) + .file("b.rs", r#"fn main() -> () {}"#) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate binary name e, but all binary targets must have a unique name +", + ) + .run(); +} + +#[cargo_test] +fn duplicate_example_names() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[example]] + name = "ex" + path = "examples/ex.rs" + + [[example]] + name = "ex" + path = "examples/ex2.rs" + "#, + ) + .file("examples/ex.rs", r#"fn main () -> () {}"#) + .file("examples/ex2.rs", r#"fn main () -> () {}"#) + .build(); + + p.cargo("build --example ex") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate example name ex, but all example targets must have a unique name +", + ) + .run(); +} + +#[cargo_test] +fn duplicate_bench_names() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.1.0" + authors = ["A "] + + [[bench]] + name = "ex" + path = "benches/ex.rs" + + [[bench]] + name = "ex" + path = "benches/ex2.rs" + "#, + ) + .file("benches/ex.rs", r#"fn main () {}"#) + .file("benches/ex2.rs", r#"fn main () {}"#) + .build(); + + p.cargo("bench") 
+ .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + found duplicate bench name ex, but all bench targets must have a unique name +", + ) + .run(); +} + +#[cargo_test] +fn duplicate_deps() { + let p = project() + .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("shim-bar/src/lib.rs", "pub fn a() {}") + .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("linux-bar/src/lib.rs", "pub fn a() {}") + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "shim-bar" } + + [target.x86_64-unknown-linux-gnu.dependencies] + bar = { path = "linux-bar" } + "#, + ) + .file("src/main.rs", r#"fn main () {}"#) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dependency 'bar' has different source paths depending on the build target. Each dependency must \ +have a single canonical source path irrespective of build target. +", + ) + .run(); +} + +#[cargo_test] +fn duplicate_deps_diff_sources() { + let p = project() + .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("shim-bar/src/lib.rs", "pub fn a() {}") + .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("linux-bar/src/lib.rs", "pub fn a() {}") + .file( + "Cargo.toml", + r#" + [package] + name = "qqq" + version = "0.0.1" + authors = [] + + [target.i686-unknown-linux-gnu.dependencies] + bar = { path = "shim-bar" } + + [target.x86_64-unknown-linux-gnu.dependencies] + bar = { path = "linux-bar" } + "#, + ) + .file("src/main.rs", r#"fn main () {}"#) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dependency 'bar' has different source paths depending on the build target. Each dependency must \ +have a single canonical source path irrespective of build target. +", + ) + .run(); +} + +#[cargo_test] +fn unused_keys() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [target.foo] + bar = "3" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +warning: unused manifest key: target.foo.bar +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [profile.debug] + debug = 1 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +warning: unused manifest key: profile.debug +warning: use `[profile.dev]` to configure debug builds +[..] +[..]", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + bulid = "foo" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + p.cargo("build") + .with_stderr( + "\ +warning: unused manifest key: project.bulid +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + let p = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + build = "foo" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + p.cargo("build") + .with_stderr( + "\ +warning: unused manifest key: lib.build +[COMPILING] foo [..] 
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn unused_keys_in_virtual_manifest() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + bulid = "foo" + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", r"") + .build(); + p.cargo("build --all") + .with_stderr( + "\ +[WARNING] [..]/foo/Cargo.toml: unused manifest key: workspace.bulid +[COMPILING] bar [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn empty_dependencies() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = {} + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + p.cargo("build") + .with_stderr_contains( + "\ +warning: dependency (bar) specified without providing a local path, Git repository, or version \ +to use. This will be considered an error in future versions +", + ) + .run(); +} + +#[cargo_test] +fn invalid_toml_historically_allowed_is_warned() { + let p = project() + .file(".cargo/config", "[bar] baz = 2") + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +warning: TOML file found which contains invalid syntax and will soon not parse +at `[..]config`. + +The TOML spec requires newlines after table definitions (e.g., `[a] b = 1` is +invalid), but this file has a table header which does not have a newline after +it. A newline needs to be added and this warning will soon become a hard error +in the future. +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn ambiguous_git_reference() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = "http://127.0.0.1" + branch = "master" + tag = "some-tag" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ +[WARNING] dependency (bar) specification is ambiguous. \ +Only one of `branch`, `tag` or `rev` is allowed. 
\ +This will be considered an error in future versions +", + ) + .run(); +} + +#[cargo_test] +fn bad_source_config1() { + let p = project() + .file("src/lib.rs", "") + .file(".cargo/config", "[source.foo]") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr("error: no source URL specified for `source.foo`, need [..]") + .run(); +} + +#[cargo_test] +fn bad_source_config2() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + could not find a configured source with the name `bar` \ + when attempting to lookup `crates-io` (configuration in [..]) +", + ) + .run(); +} + +#[cargo_test] +fn bad_source_config3() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'https://example.com' + replace-with = 'crates-io' + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + detected a cycle of `replace-with` sources, [..] +", + ) + .run(); +} + +#[cargo_test] +fn bad_source_config4() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'https://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'https://example.com' + replace-with = 'crates-io' + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + detected a cycle of `replace-with` sources, the source `crates-io` is \ + eventually replaced with itself (configuration in [..]) +", + ) + .run(); +} + +#[cargo_test] +fn bad_source_config5() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'https://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'not a url' + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: configuration key `source.bar.registry` specified an invalid URL (in [..]) + +Caused by: + invalid url `not a url`: [..] +", + ) + .run(); +} + +#[cargo_test] +fn both_git_and_path_specified() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = "http://127.0.0.1" + path = "bar" + "#, + ) + .file("src/lib.rs", "") + .build(); + + foo.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ +[WARNING] dependency (bar) specification is ambiguous. \ +Only one of `git` or `path` is allowed. 
\ +This will be considered an error in future versions +", + ) + .run(); +} + +#[cargo_test] +fn bad_source_config6() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'https://example.com' + replace-with = ['not', 'a', 'string'] + "#, + ) + .build(); + + p.cargo("build").with_status(101).with_stderr( + "error: expected a string, but found a array for `source.crates-io.replace-with` in [..]", + ) + .run(); +} + +#[cargo_test] +fn ignored_git_revision() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + path = "bar" + branch = "spam" + "#, + ) + .file("src/lib.rs", "") + .build(); + + foo.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "[WARNING] key `branch` is ignored for dependency (bar). \ + This will be considered an error in future versions", + ) + .run(); +} + +#[cargo_test] +fn bad_source_config7() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.foo] + registry = 'https://example.com' + local-registry = 'file:///another/file' + "#, + ) + .build(); + + Package::new("bar", "0.1.0").publish(); + + p.cargo("build") + .with_status(101) + .with_stderr("error: more than one source URL specified for `source.foo`") + .run(); +} + +#[cargo_test] +fn bad_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = 3 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: integer `3`, expected a version string like [..] +", + ) + .run(); +} + +#[cargo_test] +fn bad_debuginfo() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [profile.dev] + debug = 'a' + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: string \"a\", expected a boolean or an integer for [..] +", + ) + .run(); +} + +#[cargo_test] +fn bad_opt_level() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 3 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + invalid type: integer `3`, expected a boolean or a string for key [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn warn_semver_metadata() { + Package::new("bar", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.0.0" + + [dependencies] + bar = "1.0.0+1234" + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("check") + .with_stderr_contains("[WARNING] version requirement `1.0.0+1234` for dependency `bar`[..]") + .run(); +} diff --git a/tests/testsuite/bad_manifest_path.rs b/tests/testsuite/bad_manifest_path.rs new file mode 100644 index 00000000000..83990a5a7be --- /dev/null +++ b/tests/testsuite/bad_manifest_path.rs @@ -0,0 +1,382 @@ +use crate::support::{basic_bin_manifest, main_file, project}; + +fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo(command) + .arg("--manifest-path") + .arg(manifest_path_argument) + .cwd(p.root().parent().unwrap()) + .with_status(101) + .with_stderr( + "[ERROR] the manifest-path must be a path \ + to a Cargo.toml file", + ) + .run(); +} + +fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { + let p = project().build(); + let expected_path = manifest_path_argument + .split('/') + .collect::>() + .join("[..]"); + + p.cargo(command) + .arg("--manifest-path") + .arg(manifest_path_argument) + .cwd(p.root().parent().unwrap()) + .with_status(101) + .with_stderr(format!( + "[ERROR] manifest path `{}` does not exist", + expected_path + )) + .run(); +} + +#[cargo_test] +fn bench_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("bench", "foo"); +} + +#[cargo_test] +fn bench_dir_plus_file() { + assert_not_a_cargo_toml("bench", "foo/bar"); +} + +#[cargo_test] +fn bench_dir_plus_path() { + assert_not_a_cargo_toml("bench", "foo/bar/baz"); +} + +#[cargo_test] +fn bench_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn build_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("build", "foo"); +} + +#[cargo_test] +fn build_dir_plus_file() { + assert_not_a_cargo_toml("bench", "foo/bar"); +} + +#[cargo_test] +fn build_dir_plus_path() { + assert_not_a_cargo_toml("bench", "foo/bar/baz"); +} + +#[cargo_test] +fn build_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn clean_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("clean", "foo"); +} + +#[cargo_test] +fn clean_dir_plus_file() { + assert_not_a_cargo_toml("clean", "foo/bar"); +} + +#[cargo_test] +fn clean_dir_plus_path() { + assert_not_a_cargo_toml("clean", "foo/bar/baz"); +} + +#[cargo_test] +fn clean_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn doc_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("doc", "foo"); +} + +#[cargo_test] +fn doc_dir_plus_file() { + assert_not_a_cargo_toml("doc", "foo/bar"); +} + +#[cargo_test] +fn doc_dir_plus_path() { + assert_not_a_cargo_toml("doc", "foo/bar/baz"); +} + +#[cargo_test] +fn doc_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn fetch_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("fetch", "foo"); +} + +#[cargo_test] +fn fetch_dir_plus_file() { + assert_not_a_cargo_toml("fetch", "foo/bar"); +} + +#[cargo_test] +fn fetch_dir_plus_path() { + 
assert_not_a_cargo_toml("fetch", "foo/bar/baz"); +} + +#[cargo_test] +fn fetch_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn generate_lockfile_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("generate-lockfile", "foo"); +} + +#[cargo_test] +fn generate_lockfile_dir_plus_file() { + assert_not_a_cargo_toml("generate-lockfile", "foo/bar"); +} + +#[cargo_test] +fn generate_lockfile_dir_plus_path() { + assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz"); +} + +#[cargo_test] +fn generate_lockfile_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn package_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("package", "foo"); +} + +#[cargo_test] +fn package_dir_plus_file() { + assert_not_a_cargo_toml("package", "foo/bar"); +} + +#[cargo_test] +fn package_dir_plus_path() { + assert_not_a_cargo_toml("package", "foo/bar/baz"); +} + +#[cargo_test] +fn package_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn pkgid_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("pkgid", "foo"); +} + +#[cargo_test] +fn pkgid_dir_plus_file() { + assert_not_a_cargo_toml("pkgid", "foo/bar"); +} + +#[cargo_test] +fn pkgid_dir_plus_path() { + assert_not_a_cargo_toml("pkgid", "foo/bar/baz"); +} + +#[cargo_test] +fn pkgid_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn publish_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("publish", "foo"); +} + +#[cargo_test] +fn publish_dir_plus_file() { + assert_not_a_cargo_toml("publish", "foo/bar"); +} + +#[cargo_test] +fn publish_dir_plus_path() { + assert_not_a_cargo_toml("publish", "foo/bar/baz"); +} + +#[cargo_test] +fn publish_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn read_manifest_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("read-manifest", "foo"); +} + +#[cargo_test] +fn read_manifest_dir_plus_file() { + assert_not_a_cargo_toml("read-manifest", "foo/bar"); +} + +#[cargo_test] +fn read_manifest_dir_plus_path() { + assert_not_a_cargo_toml("read-manifest", "foo/bar/baz"); +} + +#[cargo_test] +fn read_manifest_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn run_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("run", "foo"); +} + +#[cargo_test] +fn run_dir_plus_file() { + assert_not_a_cargo_toml("run", "foo/bar"); +} + +#[cargo_test] +fn run_dir_plus_path() { + assert_not_a_cargo_toml("run", "foo/bar/baz"); +} + +#[cargo_test] +fn run_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn rustc_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("rustc", "foo"); +} + +#[cargo_test] +fn rustc_dir_plus_file() { + assert_not_a_cargo_toml("rustc", "foo/bar"); +} + +#[cargo_test] +fn rustc_dir_plus_path() { + assert_not_a_cargo_toml("rustc", "foo/bar/baz"); +} + +#[cargo_test] +fn rustc_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn test_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("test", "foo"); +} + +#[cargo_test] +fn test_dir_plus_file() { + assert_not_a_cargo_toml("test", 
"foo/bar"); +} + +#[cargo_test] +fn test_dir_plus_path() { + assert_not_a_cargo_toml("test", "foo/bar/baz"); +} + +#[cargo_test] +fn test_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn update_dir_containing_cargo_toml() { + assert_not_a_cargo_toml("update", "foo"); +} + +#[cargo_test] +fn update_dir_plus_file() { + assert_not_a_cargo_toml("update", "foo/bar"); +} + +#[cargo_test] +fn update_dir_plus_path() { + assert_not_a_cargo_toml("update", "foo/bar/baz"); +} + +#[cargo_test] +fn update_dir_to_nonexistent_cargo_toml() { + assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml"); +} + +#[cargo_test] +fn verify_project_dir_containing_cargo_toml() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("verify-project --manifest-path foo") + .cwd(p.root().parent().unwrap()) + .with_status(1) + .with_stdout( + "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ", + ) + .run(); +} + +#[cargo_test] +fn verify_project_dir_plus_file() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("verify-project --manifest-path foo/bar") + .cwd(p.root().parent().unwrap()) + .with_status(1) + .with_stdout( + "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ", + ) + .run(); +} + +#[cargo_test] +fn verify_project_dir_plus_path() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("verify-project --manifest-path foo/bar/baz") + .cwd(p.root().parent().unwrap()) + .with_status(1) + .with_stdout( + "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ + ", + ) + .run(); +} + +#[cargo_test] +fn verify_project_dir_to_nonexistent_cargo_toml() { + let p = project().build(); + p.cargo("verify-project --manifest-path foo/bar/baz/Cargo.toml") + .cwd(p.root().parent().unwrap()) + .with_status(1) + .with_stdout( + "{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\ + ", + ) + .run(); +} diff --git a/tests/testsuite/bench.rs b/tests/testsuite/bench.rs new file mode 100644 index 00000000000..42f4415279d --- /dev/null +++ b/tests/testsuite/bench.rs @@ -0,0 +1,1632 @@ +use crate::support::is_nightly; +use crate::support::paths::CargoPathExt; +use crate::support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project}; + +#[cargo_test] +fn cargo_bench_simple() { + if !is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "hello") + }"#, + ) + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("hello\n").run(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test bench_hello ... 
bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_bench_implicit() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "src/main.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { } + fn main() { println!("Hello main!"); }"#, + ) + .file( + "tests/other.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/mybench.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + p.cargo("bench --benches") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE] +[RUNNING] target/release/deps/mybench-[..][EXE] +", + ) + .with_stdout_contains("test run2 ... bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_bin_implicit() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { } + fn main() { println!("Hello main!"); }"#, + ) + .file( + "tests/other.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/mybench.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + p.cargo("bench --bins") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE] +", + ) + .with_stdout_contains("test run1 ... bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_tarname() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "benches/bin1.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/bin2.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + p.cargo("bench --bench bin2") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/bin2-[..][EXE] +", + ) + .with_stdout_contains("test run2 ... bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_multiple_targets() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "benches/bin1.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/bin2.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run2(_ben: &mut test::Bencher) { }"#, + ) + .file( + "benches/bin3.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run3(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + p.cargo("bench --bench bin1 --bench bin2") + .with_stdout_contains("test run1 ... bench: [..]") + .with_stdout_contains("test run2 ... bench: [..]") + .with_stdout_does_not_contain("[..]run3[..]") + .run(); +} + +#[cargo_test] +fn cargo_bench_verbose() { + if !is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn main() {} + #[bench] fn bench_hello(_b: &mut test::Bencher) {} + "#, + ) + .build(); + + p.cargo("bench -v hello") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc [..] 
src/main.rs [..]` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target/release/deps/foo-[..][EXE] hello --bench`", + ) + .with_stdout_contains("test bench_hello ... bench: [..]") + .run(); +} + +#[cargo_test] +fn many_similar_names() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "src/lib.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate test; + pub fn foo() {} + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + ", + ) + .file( + "src/main.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate foo; + #[cfg(test)] + extern crate test; + fn main() {} + #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() } + ", + ) + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate foo; + extern crate test; + #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() } + "#, + ) + .build(); + + p.cargo("bench") + .with_stdout_contains("test bin_bench ... bench: 0 ns/iter (+/- 0)") + .with_stdout_contains("test lib_bench ... bench: 0 ns/iter (+/- 0)") + .with_stdout_contains("test bench_bench ... bench: 0 ns/iter (+/- 0)") + .run(); +} + +#[cargo_test] +fn cargo_bench_failing_test() { + if !is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "nope") + }"#, + ) + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("hello\n").run(); + + // Force libtest into serial execution so that the test header will be printed. + p.cargo("bench -- --test-threads=1") + .with_stdout_contains("test bench_hello ...[..]") + .with_stderr_contains( + "\ +[COMPILING] foo v0.5.0 ([CWD])[..] +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_either_contains( + "[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]", + ) + .with_either_contains("[..]left: `\"hello\"`[..]") + .with_either_contains("[..]right: `\"nope\"`[..]") + .with_either_contains("[..]src/main.rs:15[..]") + .with_status(101) + .run(); +} + +#[cargo_test] +fn bench_with_lib_dep() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "baz" + path = "src/main.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + /// + /// ```rust + /// extern crate foo; + /// fn main() { + /// println!("{}", foo::foo()); + /// } + /// ``` + /// + pub fn foo(){} + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + "#, + ) + .file( + "src/main.rs", + " + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + #[cfg(test)] + extern crate test; + + fn main() {} + + #[bench] + fn bin_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE] +[RUNNING] target/release/deps/baz-[..][EXE]", + ) + .with_stdout_contains("test lib_bench ... bench: [..]") + .with_stdout_contains("test bin_bench ... 
bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_with_deep_lib_dep() { + if !is_nightly() { + return; + } + + let p = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#, + ) + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate foo; + #[cfg(test)] + extern crate test; + #[bench] + fn bar_bench(_b: &mut test::Bencher) { + foo::foo(); + } + ", + ) + .build(); + let _p2 = project() + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + pub fn foo() {} + + #[bench] + fn foo_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/bar-[..][EXE]", + ) + .with_stdout_contains("test bar_bench ... bench: [..]") + .run(); +} + +#[cargo_test] +fn external_bench_explicit() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bench]] + name = "bench" + path = "src/bench.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + pub fn get_hello() -> &'static str { "Hello" } + + #[bench] + fn internal_bench(_b: &mut test::Bencher) {} + "#, + ) + .file( + "src/bench.rs", + r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + "#, + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE] +[RUNNING] target/release/deps/bench-[..][EXE]", + ) + .with_stdout_contains("test internal_bench ... bench: [..]") + .with_stdout_contains("test external_bench ... bench: [..]") + .run(); +} + +#[cargo_test] +fn external_bench_implicit() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + pub fn get_hello() -> &'static str { "Hello" } + + #[bench] + fn internal_bench(_b: &mut test::Bencher) {} + "#, + ) + .file( + "benches/external.rs", + r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + "#, + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE] +[RUNNING] target/release/deps/external-[..][EXE]", + ) + .with_stdout_contains("test internal_bench ... bench: [..]") + .with_stdout_contains("test external_bench ... 
bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_autodiscover_2015() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + edition = "2015" + + [[bench]] + name = "bench_magic" + required-features = ["magic"] + "#, + ) + .file("src/lib.rs", "") + .file( + "benches/bench_basic.rs", + r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn bench_basic(_b: &mut test::Bencher) {} + "#, + ) + .file( + "benches/bench_magic.rs", + r#" + #![feature(test)] + #[allow(unused_extern_crates)] + extern crate foo; + extern crate test; + + #[bench] + fn bench_magic(_b: &mut test::Bencher) {} + "#, + ) + .build(); + + p.cargo("bench bench_basic") + .with_stderr( + "warning: \ +An explicit [[bench]] section is specified in Cargo.toml which currently +disables Cargo from automatically inferring other benchmark targets. +This inference behavior will change in the Rust 2018 edition and the following +files will be included as a benchmark target: + +* [..]bench_basic.rs + +This is likely to break cargo build or cargo test as these files may not be +ready to be compiled as a benchmark target today. You can future-proof yourself +and disable this warning by adding `autobenches = false` to your [package] +section. You may also move the files to a location where Cargo would not +automatically infer them to be a target, such as in subfolders. + +For more information on this warning you can consult +https://github.com/rust-lang/cargo/issues/5330 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE] +", + ) + .run(); +} + +#[cargo_test] +fn dont_run_examples() { + if !is_nightly() { + return; + } + + let p = project() + .file("src/lib.rs", r"") + .file( + "examples/dont-run-me-i-will-fail.rs", + r#"fn main() { panic!("Examples should not be run by 'cargo test'"); }"#, + ) + .build(); + p.cargo("bench").run(); +} + +#[cargo_test] +fn pass_through_command_line() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "src/lib.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] fn foo(_b: &mut test::Bencher) {} + #[bench] fn bar(_b: &mut test::Bencher) {} + ", + ) + .build(); + + p.cargo("bench bar") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test bar ... bench: [..]") + .run(); + + p.cargo("bench foo") + .with_stderr( + "[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test foo ... 
bench: [..]") + .run(); +} + +// Regression test for running cargo-bench twice with +// tests in an rlib +#[cargo_test] +fn cargo_bench_twice() { + if !is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file( + "src/foo.rs", + r#" + #![crate_type = "rlib"] + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] + fn dummy_bench(b: &mut test::Bencher) { } + "#, + ) + .build(); + + for _ in 0..2 { + p.cargo("bench").run(); + } +} + +#[cargo_test] +fn lib_bin_same_name() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + [[bin]] + name = "foo" + "#, + ) + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[bench] fn lib_bench(_b: &mut test::Bencher) {} + ", + ) + .file( + "src/main.rs", + " + #![cfg_attr(test, feature(test))] + #[allow(unused_extern_crates)] + extern crate foo; + #[cfg(test)] + extern crate test; + + #[bench] + fn bin_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains_n("test [..] ... bench: [..]", 2) + .run(); +} + +#[cargo_test] +fn lib_with_standard_name() { + if !is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) + .file( + "src/lib.rs", + " + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + + /// ``` + /// syntax::foo(); + /// ``` + pub fn foo() {} + + #[bench] + fn foo_bench(_b: &mut test::Bencher) {} + ", + ) + .file( + "benches/bench.rs", + " + #![feature(test)] + extern crate syntax; + extern crate test; + + #[bench] + fn bench(_b: &mut test::Bencher) { syntax::foo() } + ", + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] syntax v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/syntax-[..][EXE] +[RUNNING] target/release/deps/bench-[..][EXE]", + ) + .with_stdout_contains("test foo_bench ... bench: [..]") + .with_stdout_contains("test bench ... bench: [..]") + .run(); +} + +#[cargo_test] +fn lib_with_standard_name2() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + name = "syntax" + bench = false + doctest = false + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate syntax; + #[cfg(test)] + extern crate test; + + fn main() {} + + #[bench] + fn bench(_b: &mut test::Bencher) { syntax::foo() } + ", + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] syntax v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/syntax-[..][EXE]", + ) + .with_stdout_contains("test bench ... 
bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_dylib() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + extern crate bar as the_bar; + #[cfg(test)] + extern crate test; + + pub fn bar() { the_bar::baz(); } + + #[bench] + fn foo(_b: &mut test::Bencher) {} + "#, + ) + .file( + "benches/bench.rs", + r#" + #![feature(test)] + extern crate foo as the_foo; + extern crate test; + + #[bench] + fn foo(_b: &mut test::Bencher) { the_foo::bar(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#, + ) + .file("bar/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("bench -v") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[RUNNING] [..] -C opt-level=3 [..] +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench` +[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`", + ) + .with_stdout_contains_n("test foo ... bench: [..]", 2) + .run(); + + p.root().move_into_the_past(); + p.cargo("bench -v") + .with_stderr( + "\ +[FRESH] bar v0.0.1 ([CWD]/bar) +[FRESH] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench` +[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`", + ) + .with_stdout_contains_n("test foo ... bench: [..]", 2) + .run(); +} + +#[cargo_test] +fn bench_twice_with_build_cmd() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file( + "src/lib.rs", + " + #![feature(test)] + #[cfg(test)] + extern crate test; + #[bench] + fn foo(_b: &mut test::Bencher) {} + ", + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test foo ... bench: [..]") + .run(); + + p.cargo("bench") + .with_stderr( + "[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test foo ... 
bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_with_examples() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "6.6.6" + authors = [] + + [[example]] + name = "teste1" + + [[bench]] + name = "testb1" + "#, + ) + .file( + "src/lib.rs", + r#" + #![cfg_attr(test, feature(test))] + #[cfg(test)] + extern crate test; + #[cfg(test)] + use test::Bencher; + + pub fn f1() { + println!("f1"); + } + + pub fn f2() {} + + #[bench] + fn bench_bench1(_b: &mut Bencher) { + f2(); + } + "#, + ) + .file( + "benches/testb1.rs", + " + #![feature(test)] + extern crate foo; + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bench2(_b: &mut Bencher) { + foo::f2(); + } + ", + ) + .file( + "examples/teste1.rs", + r#" + extern crate foo; + + fn main() { + println!("example1"); + foo::f1(); + } + "#, + ) + .build(); + + p.cargo("bench -v") + .with_stderr( + "\ +[COMPILING] foo v6.6.6 ([CWD]) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[CWD]/target/release/deps/foo-[..][EXE] --bench` +[RUNNING] `[CWD]/target/release/deps/testb1-[..][EXE] --bench`", + ) + .with_stdout_contains("test bench_bench1 ... bench: [..]") + .with_stdout_contains("test bench_bench2 ... bench: [..]") + .run(); +} + +#[cargo_test] +fn test_a_bench() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [lib] + name = "foo" + test = false + doctest = false + + [[bench]] + name = "b" + test = true + "#, + ) + .file("src/lib.rs", "") + .file("benches/b.rs", "#[test] fn foo() {}") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/b-[..][EXE]", + ) + .with_stdout_contains("test foo ... ok") + .run(); +} + +#[cargo_test] +fn test_bench_no_run() { + if !is_nightly() { + return; + } + + let p = project() + .file("src/lib.rs", "") + .file( + "benches/bbaz.rs", + r#" + #![feature(test)] + + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_: &mut Bencher) {} + "#, + ) + .build(); + + p.cargo("bench --no-run") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn test_bench_no_fail_fast() { + if !is_nightly() { + return; + } + + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/foo.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[bench] + fn bench_hello(_b: &mut test::Bencher) { + assert_eq!(hello(), "hello") + } + + #[bench] + fn bench_nope(_b: &mut test::Bencher) { + assert_eq!("nope", hello()) + }"#, + ) + .build(); + + p.cargo("bench --no-fail-fast -- --test-threads=1") + .with_status(101) + .with_stderr_contains("[RUNNING] target/release/deps/foo-[..][EXE]") + .with_stdout_contains("running 2 tests") + .with_stderr_contains("[RUNNING] target/release/deps/foo-[..][EXE]") + .with_stdout_contains("test bench_hello [..]") + .with_stdout_contains("test bench_nope [..]") + .run(); +} + +#[cargo_test] +fn test_bench_multiple_packages() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#, + ) + .file("src/lib.rs", "") + .build(); + + let _bar = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbar" + test = true + "#, + ) + .file("src/lib.rs", "") + .file( + "benches/bbar.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_b: &mut Bencher) {} + "#, + ) + .build(); + + let _baz = project() + .at("baz") + .file( + "Cargo.toml", + r#" + [project] + name = "baz" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbaz" + test = true + "#, + ) + .file("src/lib.rs", "") + .file( + "benches/bbaz.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_b: &mut Bencher) {} + "#, + ) + .build(); + + p.cargo("bench -p bar -p baz") + .with_stderr_contains("[RUNNING] target/release/deps/bbaz-[..][EXE]") + .with_stdout_contains("test bench_baz ... bench: [..]") + .with_stderr_contains("[RUNNING] target/release/deps/bbar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_all_workspace() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file( + "bar/benches/bar.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + p.cargo("bench --all") + .with_stderr_contains("[RUNNING] target/release/deps/bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... bench: [..]") + .with_stderr_contains("[RUNNING] target/release/deps/foo-[..][EXE]") + .with_stdout_contains("test bench_foo ... 
bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_all_exclude() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file( + "bar/src/lib.rs", + r#" + #![feature(test)] + #[cfg(test)] + extern crate test; + + #[bench] + pub fn bar(b: &mut test::Bencher) { + b.iter(|| {}); + } + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file( + "baz/src/lib.rs", + "#[test] pub fn baz() { break_the_build(); }", + ) + .build(); + + p.cargo("bench --all --exclude baz") + .with_stdout_contains( + "\ +running 1 test +test bar ... bench: [..] ns/iter (+/- [..])", + ) + .run(); +} + +#[cargo_test] +fn bench_all_virtual_manifest() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file( + "bar/benches/bar.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .file( + "baz/benches/baz.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + // The order in which bar and baz are built is not guaranteed + p.cargo("bench --all") + .with_stderr_contains("[RUNNING] target/release/deps/baz-[..][EXE]") + .with_stdout_contains("test bench_baz ... bench: [..]") + .with_stderr_contains("[RUNNING] target/release/deps/bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... 
bench: [..]") + .run(); +} + +// https://github.com/rust-lang/cargo/issues/4287 +#[cargo_test] +fn legacy_bench_name() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [[bench]] + name = "bench" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/bench.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + p.cargo("bench") + .with_stderr_contains( + "\ +[WARNING] path `[..]src/bench.rs` was erroneously implicitly accepted for benchmark `bench`, +please set bench.path in Cargo.toml", + ) + .run(); +} + +#[cargo_test] +fn bench_virtual_manifest_all_implied() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn foo() {}") + .file( + "bar/benches/bar.rs", + r#" + #![feature(test)] + extern crate test; + use test::Bencher; + #[bench] + fn bench_bar(_: &mut Bencher) -> () { () } + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .file( + "baz/benches/baz.rs", + r#" + #![feature(test)] + extern crate test; + use test::Bencher; + #[bench] + fn bench_baz(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + // The order in which bar and baz are built is not guaranteed + + p.cargo("bench") + .with_stderr_contains("[RUNNING] target/release/deps/baz-[..][EXE]") + .with_stdout_contains("test bench_baz ... bench: [..]") + .with_stderr_contains("[RUNNING] target/release/deps/bar-[..][EXE]") + .with_stdout_contains("test bench_bar ... 
bench: [..]") + .run(); +} + +#[cargo_test] +fn json_artifact_includes_executable_for_benchmark() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "benches/benchmark.rs", + r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_foo(_: &mut Bencher) -> () { () } + "#, + ) + .build(); + + p.cargo("bench --no-run --message-format=json") + .with_json( + r#" + { + "executable": "[..]/foo/target/release/benchmark-[..][EXE]", + "features": [], + "filenames": [ "[..]/foo/target/release/benchmark-[..][EXE]" ], + "fresh": false, + "package_id": "foo 0.0.1 ([..])", + "profile": "{...}", + "reason": "compiler-artifact", + "target": { + "crate_types": [ "bin" ], + "kind": [ "bench" ], + "doctest": false, + "edition": "2015", + "name": "benchmark", + "src_path": "[..]/foo/benches/benchmark.rs" + } + } + "#, + ) + .run(); +} diff --git a/tests/testsuite/build.rs b/tests/testsuite/build.rs new file mode 100644 index 00000000000..59ed086708d --- /dev/null +++ b/tests/testsuite/build.rs @@ -0,0 +1,4697 @@ +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; + +use crate::support::paths::{root, CargoPathExt}; +use crate::support::registry::Package; +use crate::support::ProjectBuilder; +use crate::support::{ + basic_bin_manifest, basic_lib_manifest, basic_manifest, rustc_host, sleep_ms, +}; +use crate::support::{main_file, project, Execs}; +use cargo::util::paths::dylib_path_envvar; + +#[cargo_test] +fn cargo_compile_simple() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); +} + +#[cargo_test] +fn cargo_fail_with_no_stderr() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &String::from("refusal")) + .build(); + p.cargo("build --message-format=json") + .with_status(101) + .with_stderr_does_not_contain("--- stderr") + .run(); +} + +/// Checks that the `CARGO_INCREMENTAL` environment variable results in +/// `rustc` getting `-C incremental` passed to it. +#[cargo_test] +fn cargo_compile_incremental() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build -v") + .env("CARGO_INCREMENTAL", "1") + .with_stderr_contains( + "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n", + ) + .run(); + + p.cargo("test -v") + .env("CARGO_INCREMENTAL", "1") + .with_stderr_contains( + "[RUNNING] `rustc [..] 
-C incremental=[..]/target/debug/incremental[..]`\n", + ) + .run(); +} + +#[cargo_test] +fn incremental_profile() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [profile.dev] + incremental = false + + [profile.release] + incremental = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .env_remove("CARGO_INCREMENTAL") + .with_stderr_does_not_contain("[..]C incremental=[..]") + .run(); + + p.cargo("build -v") + .env("CARGO_INCREMENTAL", "1") + .with_stderr_contains("[..]C incremental=[..]") + .run(); + + p.cargo("build --release -v") + .env_remove("CARGO_INCREMENTAL") + .with_stderr_contains("[..]C incremental=[..]") + .run(); + + p.cargo("build --release -v") + .env("CARGO_INCREMENTAL", "0") + .with_stderr_does_not_contain("[..]C incremental=[..]") + .run(); +} + +#[cargo_test] +fn incremental_config() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [build] + incremental = false + "#, + ) + .build(); + + p.cargo("build -v") + .env_remove("CARGO_INCREMENTAL") + .with_stderr_does_not_contain("[..]C incremental=[..]") + .run(); + + p.cargo("build -v") + .env("CARGO_INCREMENTAL", "1") + .with_stderr_contains("[..]C incremental=[..]") + .run(); +} + +#[cargo_test] +fn cargo_compile_with_workspace_excluded() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("build --all --exclude foo") + .with_stderr_does_not_contain("[..]virtual[..]") + .with_stderr_contains("[..]no packages to compile") + .with_status(101) + .run(); +} + +#[cargo_test] +fn cargo_compile_manifest_path() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build --manifest-path foo/Cargo.toml") + .cwd(p.root().parent().unwrap()) + .run(); + assert!(p.bin("foo").is_file()); +} + +#[cargo_test] +fn cargo_compile_with_invalid_manifest() { + let p = project().file("Cargo.toml", "").build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + virtual manifests must be configured with [workspace] +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_manifest2() { + let p = project() + .file( + "Cargo.toml", + r" + [project] + foo = bar + ", + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + invalid number at line 3 +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_manifest3() { + let p = project().file("src/Cargo.toml", "a = bar").build(); + + p.cargo("build --manifest-path src/Cargo.toml") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + invalid number at line 1 +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_duplicate_build_targets() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "main" + path = "src/main.rs" + crate-type = ["dylib"] + + [dependencies] + "#, + ) + .file("src/main.rs", "#![allow(warnings)] fn main() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +warning: file found to be present in multiple build targets: [..]main.rs +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_version() { + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "1.0")) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Expected dot for key `package.version` +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_empty_package_name() { + let p = project() + .file("Cargo.toml", &basic_manifest("", "0.0.0")) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + package name cannot be an empty string +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_package_name() { + let p = project() + .file("Cargo.toml", &basic_manifest("foo::bar", "0.0.0")) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Invalid character `:` in package name: `foo::bar` +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_bin_target_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "" + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + binary target names cannot be empty +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_forbidden_bin_target_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "build" + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + the binary target name `build` is forbidden +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_bin_and_crate_type() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "the_foo_bin" + path = "src/foo.rs" + crate-type = ["cdylib", "rlib"] + "#, + ) + .file("src/foo.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + the target `the_foo_bin` is a binary and can't have any crate-types set \ +(currently \"cdylib, rlib\")", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_bin_and_proc() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "the_foo_bin" + path = "src/foo.rs" + proc-macro = true + "#, + ) + .file("src/foo.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + the target `the_foo_bin` is a binary and can't have `proc-macro` set `true`", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_lib_target_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [lib] + name = "" + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + library target names cannot be empty +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_non_numeric_dep_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + 
crossbeam = "y" + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[CWD]/Cargo.toml` + +Caused by: + failed to parse the version requirement `y` for dependency `crossbeam` + +Caused by: + the given version requirement is invalid +", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_without_manifest() { + let p = project().no_manifest().build(); + + p.cargo("build") + .with_status(101) + .with_stderr("[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory") + .run(); +} + +#[cargo_test] +fn cargo_compile_with_invalid_code() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", "invalid rust code!") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +[ERROR] Could not compile `foo`. + +To learn more, run the command again with --verbose.\n", + ) + .run(); + assert!(p.root().join("Cargo.lock").is_file()); +} + +#[cargo_test] +fn cargo_compile_with_invalid_code_in_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + [dependencies.baz] + path = "../baz" + "#, + ) + .file("src/main.rs", "invalid rust code!") + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "invalid rust code!") + .build(); + let _baz = project() + .at("baz") + .file("Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("src/lib.rs", "invalid rust code!") + .build(); + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]invalid rust code[..]") + .with_stderr_contains("[ERROR] Could not compile [..]") + .run(); +} + +#[cargo_test] +fn cargo_compile_with_warnings_in_the_root_package() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", "fn main() {} fn dead() {}") + .build(); + + p.cargo("build") + .with_stderr_contains("[..]function is never used: `dead`[..]") + .run(); +} + +#[cargo_test] +fn cargo_compile_with_warnings_in_a_dep_package() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + + [[bin]] + + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file( + "bar/src/bar.rs", + r#" + pub fn gimme() -> &'static str { + "test passed" + } + + fn dead() {} + "#, + ) + .build(); + + p.cargo("build") + .with_stderr_contains("[..]function is never used: `dead`[..]") + .run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("test passed\n").run(); +} + +#[cargo_test] +fn cargo_compile_with_nested_deps_inferred() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = 'bar' + + [[bin]] + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) + .file( + "baz/src/lib.rs", + r#" + pub fn gimme() -> String { + 
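// the parent test asserts that the compiled binary prints exactly this value +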
"test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + assert!(!p.bin("libbar.rlib").is_file()); + assert!(!p.bin("libbaz.rlib").is_file()); + + p.process(&p.bin("foo")).with_stdout("test passed\n").run(); +} + +#[cargo_test] +fn cargo_compile_with_nested_deps_correct_bin() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + + [[bin]] + name = "foo" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) + .file( + "baz/src/lib.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + assert!(!p.bin("libbar.rlib").is_file()); + assert!(!p.bin("libbaz.rlib").is_file()); + + p.process(&p.bin("foo")).with_stdout("test passed\n").run(); +} + +#[cargo_test] +fn cargo_compile_with_nested_deps_shorthand() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file( + "baz/src/baz.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + assert!(!p.bin("libbar.rlib").is_file()); + assert!(!p.bin("libbaz.rlib").is_file()); + + p.process(&p.bin("foo")).with_stdout("test passed\n").run(); +} + +#[cargo_test] +fn cargo_compile_with_nested_deps_longhand() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + version = "0.5.0" + + [[bin]] + + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + path = "../baz" + version = "0.5.0" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file( + "baz/src/baz.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + assert!(!p.bin("libbar.rlib").is_file()); + assert!(!p.bin("libbaz.rlib").is_file()); + + p.process(&p.bin("foo")).with_stdout("test passed\n").run(); +} + +// Check that Cargo gives a sensible error if a dependency can't be found +// because of a name mismatch. 
+#[cargo_test] +fn cargo_compile_with_dep_name_mismatch() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + + [[bin]] + + name = "foo" + + [dependencies.notquitebar] + + path = "bar" + "#, + ) + .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"])) + .file("bar/Cargo.toml", &basic_bin_manifest("bar")) + .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[])) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + r#"error: no matching package named `notquitebar` found +location searched: [CWD]/bar +required by package `foo v0.0.1 ([CWD])` +"#, + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_filename() { + let p = project() + .file("src/lib.rs", "") + .file( + "src/bin/a.rs", + r#" + extern crate foo; + fn main() { println!("hello a.rs"); } + "#, + ) + .file("examples/a.rs", r#"fn main() { println!("example"); }"#) + .build(); + + p.cargo("build --bin bin.rs") + .with_status(101) + .with_stderr("[ERROR] no bin target named `bin.rs`") + .run(); + + p.cargo("build --bin a.rs") + .with_status(101) + .with_stderr( + "\ +[ERROR] no bin target named `a.rs` + +Did you mean `a`?", + ) + .run(); + + p.cargo("build --example example.rs") + .with_status(101) + .with_stderr("[ERROR] no example target named `example.rs`") + .run(); + + p.cargo("build --example a.rs") + .with_status(101) + .with_stderr( + "\ +[ERROR] no example target named `a.rs` + +Did you mean `a`?", + ) + .run(); +} + +#[cargo_test] +fn incompatible_dependencies() { + Package::new("bad", "0.1.0").publish(); + Package::new("bad", "1.0.0").publish(); + Package::new("bad", "1.0.1").publish(); + Package::new("bad", "1.0.2").publish(); + Package::new("bar", "0.1.0").dep("bad", "0.1.0").publish(); + Package::new("baz", "0.1.1").dep("bad", "=1.0.0").publish(); + Package::new("baz", "0.1.0").dep("bad", "=1.0.0").publish(); + Package::new("qux", "0.1.2").dep("bad", ">=1.0.1").publish(); + Package::new("qux", "0.1.1").dep("bad", ">=1.0.1").publish(); + Package::new("qux", "0.1.0").dep("bad", ">=1.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = "0.1.0" + baz = "0.1.0" + qux = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main(){}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to select a version for `bad`. + ... required by package `qux v0.1.0` + ... which is depended on by `foo v0.0.1 ([..])` +versions that meet the requirements `>= 1.0.1` are: 1.0.2, 1.0.1 + +all possible versions conflict with previously selected packages. + + previously selected package `bad v1.0.0` + ... which is depended on by `baz v0.1.0` + ... 
which is depended on by `foo v0.0.1 ([..])` + +failed to select a version for `bad` which could resolve this conflict", + ) + .run(); +} + +#[cargo_test] +fn incompatible_dependencies_with_multi_semver() { + Package::new("bad", "1.0.0").publish(); + Package::new("bad", "1.0.1").publish(); + Package::new("bad", "2.0.0").publish(); + Package::new("bad", "2.0.1").publish(); + Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish(); + Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = "0.1.0" + baz = "0.1.0" + bad = ">=1.0.1, <=2.0.0" + "#, + ) + .file("src/main.rs", "fn main(){}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to select a version for `bad`. + ... required by package `foo v0.0.1 ([..])` +versions that meet the requirements `>= 1.0.1, <= 2.0.0` are: 2.0.0, 1.0.1 + +all possible versions conflict with previously selected packages. + + previously selected package `bad v2.0.1` + ... which is depended on by `baz v0.1.0` + ... which is depended on by `foo v0.0.1 ([..])` + + previously selected package `bad v1.0.0` + ... which is depended on by `bar v0.1.0` + ... which is depended on by `foo v0.0.1 ([..])` + +failed to select a version for `bad` which could resolve this conflict", + ) + .run(); +} + +#[cargo_test] +fn compile_path_dep_then_change_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + File::create(&p.root().join("bar/Cargo.toml")) + .unwrap() + .write_all(basic_manifest("bar", "0.0.2").as_bytes()) + .unwrap(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn ignores_carriage_return_in_lockfile() { + let p = project() + .file("src/main.rs", r"mod a; fn main() {}") + .file("src/a.rs", "") + .build(); + + p.cargo("build").run(); + + let lockfile = p.root().join("Cargo.lock"); + let mut lock = String::new(); + File::open(&lockfile) + .unwrap() + .read_to_string(&mut lock) + .unwrap(); + let lock = lock.replace("\n", "\r\n"); + File::create(&lockfile) + .unwrap() + .write_all(lock.as_bytes()) + .unwrap(); + p.cargo("build").run(); +} + +#[cargo_test] +fn cargo_default_env_metadata_env_var() { + // Ensure that path dep + dylib + env_var get metadata + // (even though path_dep + dylib should not) + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "// hi") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#, + ) + .file("bar/src/lib.rs", "// hello") + .build(); + + // No metadata on libbar since it's a dylib path dependency + p.cargo("build -v") + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \ + --emit=[..]link \ + -C prefer-dynamic -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] 
\ + -L dependency=[CWD]/target/debug/deps` +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps \ + --extern bar=[CWD]/target/debug/deps/{prefix}bar{suffix}` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX, + )) + .run(); + + p.cargo("clean").run(); + + // If you set the env-var, then we expect metadata on libbar + p.cargo("build -v") + .env("__CARGO_DEFAULT_LIB_METADATA", "stable") + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \ + --emit=[..]link \ + -C prefer-dynamic -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps \ + --extern bar=[CWD]/target/debug/deps/{prefix}bar-[..]{suffix}` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX, + )) + .run(); +} + +#[cargo_test] +fn crate_env_vars() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.1-alpha.1" + description = "This is foo" + homepage = "https://example.com" + repository = "https://example.com/repo.git" + authors = ["wycats@example.com"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + + static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR"); + static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR"); + static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH"); + static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE"); + static VERSION: &'static str = env!("CARGO_PKG_VERSION"); + static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR"); + static PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); + static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE"); + static REPOSITORY: &'static str = env!("CARGO_PKG_REPOSITORY"); + static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION"); + + fn main() { + let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR, + VERSION_MINOR, VERSION_PATCH, VERSION_PRE, + CARGO_MANIFEST_DIR); + assert_eq!(s, foo::version()); + println!("{}", s); + assert_eq!("foo", PKG_NAME); + assert_eq!("https://example.com", HOMEPAGE); + assert_eq!("https://example.com/repo.git", REPOSITORY); + assert_eq!("This is foo", DESCRIPTION); + let s = format!("{}.{}.{}-{}", VERSION_MAJOR, + VERSION_MINOR, VERSION_PATCH, VERSION_PRE); + assert_eq!(s, VERSION); + } + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn version() -> String { + format!("{}-{}-{} @ {} in {}", + env!("CARGO_PKG_VERSION_MAJOR"), + env!("CARGO_PKG_VERSION_MINOR"), + env!("CARGO_PKG_VERSION_PATCH"), + env!("CARGO_PKG_VERSION_PRE"), + env!("CARGO_MANIFEST_DIR")) + } + "#, + ) + .build(); + + println!("build"); + p.cargo("build -v").run(); + + println!("bin"); + p.process(&p.bin("foo")) + .with_stdout("0-5-1 @ alpha.1 in [CWD]") + .run(); + + println!("test"); + p.cargo("test -v").run(); +} + +#[cargo_test] +fn crate_authors_env_vars() { + let p = project() + .file( + 
"Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.1-alpha.1" + authors = ["wycats@example.com", "neikos@example.com"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS"); + + fn main() { + let s = "wycats@example.com:neikos@example.com"; + assert_eq!(AUTHORS, foo::authors()); + println!("{}", AUTHORS); + assert_eq!(s, AUTHORS); + } + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn authors() -> String { + format!("{}", env!("CARGO_PKG_AUTHORS")) + } + "#, + ) + .build(); + + println!("build"); + p.cargo("build -v").run(); + + println!("bin"); + p.process(&p.bin("foo")) + .with_stdout("wycats@example.com:neikos@example.com") + .run(); + + println!("test"); + p.cargo("test -v").run(); +} + +#[cargo_test] +fn vv_prints_rustc_env_vars() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = ["escape='\"@example.com"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + let mut b = p.cargo("build -vv"); + + if cfg!(windows) { + b.with_stderr_contains( + "[RUNNING] `[..]set CARGO_PKG_NAME=foo&& [..]rustc [..]`" + ).with_stderr_contains( + r#"[RUNNING] `[..]set CARGO_PKG_AUTHORS="escape='\"@example.com"&& [..]rustc [..]`"# + ) + } else { + b.with_stderr_contains("[RUNNING] `[..]CARGO_PKG_NAME=foo [..]rustc [..]`") + .with_stderr_contains( + r#"[RUNNING] `[..]CARGO_PKG_AUTHORS='escape='\''"@example.com' [..]rustc [..]`"#, + ) + }; + + b.run(); +} + +// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error +fn setenv_for_removing_empty_component(mut execs: Execs) -> Execs { + let v = dylib_path_envvar(); + if let Ok(search_path) = env::var(v) { + let new_search_path = + env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty())) + .expect("join_paths"); + execs.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly + } + execs +} + +// Regression test for #4277 +#[cargo_test] +fn crate_library_path_env_var() { + let p = project() + .file( + "src/main.rs", + &format!( + r##" + fn main() {{ + let search_path = env!("{}"); + let paths = std::env::split_paths(&search_path).collect::>(); + assert!(!paths.contains(&"".into())); + }} + "##, + dylib_path_envvar() + ), + ) + .build(); + + setenv_for_removing_empty_component(p.cargo("run")).run(); +} + +// Regression test for #4277 +#[cargo_test] +fn build_with_fake_libc_not_loading() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .file("libc.so.6", r#""#) + .build(); + + setenv_for_removing_empty_component(p.cargo("build")).run(); +} + +// this is testing that src/.rs still works (for now) +#[cargo_test] +fn many_crate_types_old_style_lib_location() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "foo" + crate_type = ["rlib", "dylib"] + "#, + ) + .file("src/foo.rs", "pub fn foo() {}") + .build(); + p.cargo("build") + .with_stderr_contains( + "\ +[WARNING] path `[..]src/foo.rs` was erroneously implicitly accepted for library `foo`, +please rename the file to `src/lib.rs` or set lib.path in Cargo.toml", + ) + .run(); + + assert!(p.root().join("target/debug/libfoo.rlib").is_file()); + let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + assert!(p.root().join("target/debug").join(&fname).is_file()); +} + +#[cargo_test] +fn 
many_crate_types_correct() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "foo" + crate_type = ["rlib", "dylib"] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + p.cargo("build").run(); + + assert!(p.root().join("target/debug/libfoo.rlib").is_file()); + let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + assert!(p.root().join("target/debug").join(&fname).is_file()); +} + +#[cargo_test] +fn self_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [dependencies.test] + + path = "." + + [lib] + name = "test" + path = "src/test.rs" + "#, + ) + .file("src/test.rs", "fn main() {}") + .build(); + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] cyclic package dependency: package `test v0.0.0 ([CWD])` depends on itself. Cycle: +package `test v0.0.0 ([CWD])`", + ) + .run(); +} + +#[cargo_test] +fn ignore_broken_symlinks() { + // windows and symlinks don't currently agree that well + if cfg!(windows) { + return; + } + + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .symlink("Notafile", "bar") + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); +} + +#[cargo_test] +fn missing_lib_and_bin() { + let p = project().build(); + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]Cargo.toml` + +Caused by: + no targets specified in the manifest + either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n", + ) + .run(); +} + +#[cargo_test] +fn lto_build() { + // FIXME: currently this hits a linker bug on 32-bit MSVC + if cfg!(all(target_env = "msvc", target_pointer_width = "32")) { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.release] + lto = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("build -v --release") + .with_stderr( + "\ +[COMPILING] test v0.0.0 ([CWD]) +[RUNNING] `rustc --crate-name test src/main.rs --color never --crate-type bin \ + --emit=[..]link \ + -C opt-level=3 \ + -C lto \ + -C metadata=[..] \ + --out-dir [CWD]/target/release/deps \ + -L dependency=[CWD]/target/release/deps` +[FINISHED] release [optimized] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn verbose_build() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn verbose_release_build() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("build -v --release") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/release/deps` +[FINISHED] release [optimized] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn verbose_release_build_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [dependencies.foo] + path = "foo" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib", "rlib"] + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + p.cargo("build -v --release") + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ([CWD]/foo) +[RUNNING] `rustc --crate-name foo foo/src/lib.rs --color never \ + --crate-type dylib --crate-type rlib \ + --emit=[..]link \ + -C prefer-dynamic \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/release/deps` +[COMPILING] test v0.0.0 ([CWD]) +[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \ + --emit=[..]link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/release/deps \ + --extern foo=[CWD]/target/release/deps/{prefix}foo{suffix} \ + --extern foo=[CWD]/target/release/deps/libfoo.rlib` +[FINISHED] release [optimized] target(s) in [..] +", + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX + )) + .run(); +} + +#[cargo_test] +fn explicit_examples() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [lib] + name = "foo" + path = "src/lib.rs" + + [[example]] + name = "hello" + path = "examples/ex-hello.rs" + + [[example]] + name = "goodbye" + path = "examples/ex-goodbye.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn get_hello() -> &'static str { "Hello" } + pub fn get_goodbye() -> &'static str { "Goodbye" } + pub fn get_world() -> &'static str { "World" } + "#, + ) + .file( + "examples/ex-hello.rs", + r#" + extern crate foo; + fn main() { println!("{}, {}!", foo::get_hello(), foo::get_world()); } + "#, + ) + .file( + "examples/ex-goodbye.rs", + r#" + extern crate foo; + fn main() { println!("{}, {}!", foo::get_goodbye(), foo::get_world()); } + "#, + ) + .build(); + + p.cargo("build --examples").run(); + p.process(&p.bin("examples/hello")) + .with_stdout("Hello, World!\n") + .run(); + p.process(&p.bin("examples/goodbye")) + .with_stdout("Goodbye, World!\n") + .run(); +} + +#[cargo_test] +fn non_existing_example() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [lib] + name = "foo" + path = "src/lib.rs" + + [[example]] + name = "hello" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("test -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `hello` example, specify example.path", + ) + .run(); +} + +#[cargo_test] +fn non_existing_binary() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/lib.rs", "") + .file("src/bin/ehlo.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `foo` bin, specify bin.path", + ) + .run(); +} + +#[cargo_test] +fn legacy_binary_paths_warnings() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [[bin]] + name = "bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .with_stderr_contains( + "\ 
+[WARNING] path `[..]src/main.rs` was erroneously implicitly accepted for binary `bar`, +please set bin.path in Cargo.toml", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [[bin]] + name = "bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .with_stderr_contains( + "\ +[WARNING] path `[..]src/bin/main.rs` was erroneously implicitly accepted for binary `bar`, +please set bin.path in Cargo.toml", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.0.0" + authors = [] + + [[bin]] + name = "bar" + "#, + ) + .file("src/bar.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .with_stderr_contains( + "\ +[WARNING] path `[..]src/bar.rs` was erroneously implicitly accepted for binary `bar`, +please set bin.path in Cargo.toml", + ) + .run(); +} + +#[cargo_test] +fn implicit_examples() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn get_hello() -> &'static str { "Hello" } + pub fn get_goodbye() -> &'static str { "Goodbye" } + pub fn get_world() -> &'static str { "World" } + "#, + ) + .file( + "examples/hello.rs", + r#" + extern crate foo; + fn main() { + println!("{}, {}!", foo::get_hello(), foo::get_world()); + } + "#, + ) + .file( + "examples/goodbye.rs", + r#" + extern crate foo; + fn main() { + println!("{}, {}!", foo::get_goodbye(), foo::get_world()); + } + "#, + ) + .build(); + + p.cargo("build --examples").run(); + p.process(&p.bin("examples/hello")) + .with_stdout("Hello, World!\n") + .run(); + p.process(&p.bin("examples/goodbye")) + .with_stdout("Goodbye, World!\n") + .run(); +} + +#[cargo_test] +fn standard_build_no_ndebug() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/foo.rs", + r#" + fn main() { + if cfg!(debug_assertions) { + println!("slow") + } else { + println!("fast") + } + } + "#, + ) + .build(); + + p.cargo("build").run(); + p.process(&p.bin("foo")).with_stdout("slow\n").run(); +} + +#[cargo_test] +fn release_build_ndebug() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/foo.rs", + r#" + fn main() { + if cfg!(debug_assertions) { + println!("slow") + } else { + println!("fast") + } + } + "#, + ) + .build(); + + p.cargo("build --release").run(); + p.process(&p.release_bin("foo")).with_stdout("fast\n").run(); +} + +#[cargo_test] +fn inferred_main_bin() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("build").run(); + p.process(&p.bin("foo")).run(); +} + +#[cargo_test] +fn deletion_causes_failure() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1")); + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]can't find crate for `bar`") + .run(); +} + +#[cargo_test] +fn bad_cargo_toml_in_target_dir() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("target/Cargo.toml", "bad-toml") + .build(); + + p.cargo("build").run(); + p.process(&p.bin("foo")).run(); +} + +#[cargo_test] +fn lib_with_standard_name() { + let p = project() + .file("Cargo.toml", 
&basic_manifest("syntax", "0.0.1")) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + "extern crate syntax; fn main() { syntax::foo() }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] syntax v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn simple_staticlib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + crate-type = ["staticlib"] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + // env var is a test for #1381 + p.cargo("build").env("CARGO_LOG", "nekoneko=trace").run(); +} + +#[cargo_test] +fn staticlib_rlib_and_bin() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + crate-type = ["staticlib", "rlib"] + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file("src/main.rs", "extern crate foo; fn main() { foo::foo(); }") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn opt_out_of_bin() { + let p = project() + .file( + "Cargo.toml", + r#" + bin = [] + + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "bad syntax") + .build(); + p.cargo("build").run(); +} + +#[cargo_test] +fn single_lib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [lib] + name = "foo" + path = "src/bar.rs" + "#, + ) + .file("src/bar.rs", "") + .build(); + p.cargo("build").run(); +} + +#[cargo_test] +fn freshness_ignores_excluded() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + exclude = ["src/b*.rs"] + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .build(); + foo.root().move_into_the_past(); + + foo.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + // Smoke test to make sure it doesn't compile again + println!("first pass"); + foo.cargo("build").with_stdout("").run(); + + // Modify an ignored file and make sure we don't rebuild + println!("second pass"); + File::create(&foo.root().join("src/bar.rs")).unwrap(); + foo.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn rebuild_preserves_out_dir() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::fs::File; + use std::path::Path; + + fn main() { + let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo"); + if env::var_os("FIRST").is_some() { + File::create(&path).unwrap(); + } else { + File::create(&path).unwrap(); + } + } + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .build(); + foo.root().move_into_the_past(); + + foo.cargo("build") + .env("FIRST", "1") + .with_stderr( + "\ +[COMPILING] foo v0.0.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + File::create(&foo.root().join("src/bar.rs")).unwrap(); + foo.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn dep_no_libs() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.0")) + .file("bar/src/main.rs", "") + .build(); + foo.cargo("build").run(); +} + +#[cargo_test] +fn recompile_space_in_name() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [lib] + name = "foo" + path = "src/my lib.rs" + "#, + ) + .file("src/my lib.rs", "") + .build(); + foo.cargo("build").run(); + foo.root().move_into_the_past(); + foo.cargo("build").with_stdout("").run(); +} + +#[cfg(unix)] +#[cargo_test] +fn ignore_bad_directories() { + use std::os::unix::prelude::*; + let foo = project() + .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) + .file("src/lib.rs", "") + .build(); + let dir = foo.root().join("tmp"); + fs::create_dir(&dir).unwrap(); + let stat = fs::metadata(&dir).unwrap(); + let mut perms = stat.permissions(); + perms.set_mode(0o644); + fs::set_permissions(&dir, perms.clone()).unwrap(); + foo.cargo("build").run(); + perms.set_mode(0o755); + fs::set_permissions(&dir, perms).unwrap(); +} + +#[cargo_test] +fn bad_cargo_config() { + let foo = project() + .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) + .file("src/lib.rs", "") + .file(".cargo/config", "this is not valid toml") + .build(); + foo.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] could not load Cargo configuration + +Caused by: + could not parse TOML configuration in `[..]` + +Caused by: + could not parse input as TOML + +Caused by: + expected an equals, found an identifier at line 1 +", + ) + .run(); +} + +#[cargo_test] +fn cargo_platform_specific_dependency() { + let host = rustc_host(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [target.{host}.dependencies] + dep = {{ path = "dep" }} + [target.{host}.build-dependencies] + build = {{ path = "build" }} + [target.{host}.dev-dependencies] + dev = {{ path = "dev" }} + "#, + host = host + ), + ) + .file("src/main.rs", "extern crate dep; fn main() { dep::dep() }") + .file( + "tests/foo.rs", + "extern crate dev; #[test] fn foo() { dev::dev() }", + ) + .file( + "build.rs", + "extern crate build; fn main() { build::build(); }", + ) + .file("dep/Cargo.toml", &basic_manifest("dep", "0.5.0")) + .file("dep/src/lib.rs", "pub fn dep() {}") + .file("build/Cargo.toml", &basic_manifest("build", "0.5.0")) + .file("build/src/lib.rs", "pub fn build() {}") + .file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0")) + .file("dev/src/lib.rs", "pub fn dev() {}") + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + p.cargo("test").run(); +} + +#[cargo_test] +fn bad_platform_specific_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [target.wrong-target.dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file( + "bar/src/lib.rs", + r#"pub fn gimme() -> String { format!("") }"#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]can't find crate for `bar`") + 
.run(); +} + +#[cargo_test] +fn cargo_platform_specific_dependency_wrong_platform() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [target.non-existing-triplet.dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file( + "bar/src/lib.rs", + "invalid rust file, should not be compiled", + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + p.process(&p.bin("foo")).run(); + + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + File::open(&loc) + .unwrap() + .read_to_string(&mut lockfile) + .unwrap(); + assert!(lockfile.contains("bar")); +} + +#[cargo_test] +fn example_as_lib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["lib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + p.cargo("build --example=ex").run(); + assert!(p.example_lib("ex", "lib").is_file()); +} + +#[cargo_test] +fn example_as_rlib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["rlib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + p.cargo("build --example=ex").run(); + assert!(p.example_lib("ex", "rlib").is_file()); +} + +#[cargo_test] +fn example_as_dylib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["dylib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + p.cargo("build --example=ex").run(); + assert!(p.example_lib("ex", "dylib").is_file()); +} + +#[cargo_test] +fn example_as_proc_macro() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["proc-macro"] + "#, + ) + .file("src/lib.rs", "") + .file( + "examples/ex.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro] + pub fn eat(_item: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + + p.cargo("build --example=ex").run(); + assert!(p.example_lib("ex", "proc-macro").is_file()); +} + +#[cargo_test] +fn example_bin_same_name() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .build(); + + p.cargo("build --examples").run(); + + assert!(!p.bin("foo").is_file()); + // We expect a file of the form bin/foo-{metadata_hash} + assert!(p.bin("examples/foo").is_file()); + + p.cargo("build --examples").run(); + + assert!(!p.bin("foo").is_file()); + // We expect a file of the form bin/foo-{metadata_hash} + assert!(p.bin("examples/foo").is_file()); +} + +#[cargo_test] +fn compile_then_delete() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("run -v").run(); + assert!(p.bin("foo").is_file()); + if cfg!(windows) { + // On windows unlinking immediately after running often fails, so sleep + sleep_ms(100); + } + fs::remove_file(&p.bin("foo")).unwrap(); + p.cargo("run -v").run(); +} + +#[cargo_test] +fn transitive_dependencies_not_available() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + 
[dependencies.aaaaa] + path = "a" + "#, + ) + .file( + "src/main.rs", + "extern crate bbbbb; extern crate aaaaa; fn main() {}", + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "aaaaa" + version = "0.0.1" + authors = [] + + [dependencies.bbbbb] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "extern crate bbbbb;") + .file("b/Cargo.toml", &basic_manifest("bbbbb", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr_contains("[..] can't find crate for `bbbbb`[..]") + .run(); +} + +#[cargo_test] +fn cyclic_deps_rejected() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = ".." + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( +"[ERROR] cyclic package dependency: package `a v0.0.1 ([CWD]/a)` depends on itself. Cycle: +package `a v0.0.1 ([CWD]/a)` + ... which is depended on by `foo v0.0.1 ([CWD])`", + ).run(); +} + +#[cargo_test] +fn predictable_filenames() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["dylib", "rlib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v").run(); + assert!(p.root().join("target/debug/libfoo.rlib").is_file()); + let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + assert!(p.root().join("target/debug").join(dylib_name).is_file()); +} + +#[cargo_test] +fn dashes_to_underscores() { + let p = project() + .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1")) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo_bar; fn main() {}") + .build(); + + p.cargo("build -v").run(); + assert!(p.bin("foo-bar").is_file()); +} + +#[cargo_test] +fn dashes_in_crate_name_bad() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo-bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo_bar; fn main() {}") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` + +Caused by: + library target names cannot contain hyphens: foo-bar +", + ) + .run(); +} + +#[cargo_test] +fn rustc_env_var() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("build -v") + .env("RUSTC", "rustc-that-does-not-exist") + .with_status(101) + .with_stderr( + "\ +[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..]) + +Caused by: +[..] 
+", + ) + .run(); + assert!(!p.bin("a").is_file()); +} + +#[cargo_test] +fn filtering() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + p.cargo("build --lib").run(); + assert!(!p.bin("a").is_file()); + + p.cargo("build --bin=a --example=a").run(); + assert!(p.bin("a").is_file()); + assert!(!p.bin("b").is_file()); + assert!(p.bin("examples/a").is_file()); + assert!(!p.bin("examples/b").is_file()); +} + +#[cargo_test] +fn filtering_implicit_bins() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + p.cargo("build --bins").run(); + assert!(p.bin("a").is_file()); + assert!(p.bin("b").is_file()); + assert!(!p.bin("examples/a").is_file()); + assert!(!p.bin("examples/b").is_file()); +} + +#[cargo_test] +fn filtering_implicit_examples() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .build(); + + p.cargo("build --examples").run(); + assert!(!p.bin("a").is_file()); + assert!(!p.bin("b").is_file()); + assert!(p.bin("examples/a").is_file()); + assert!(p.bin("examples/b").is_file()); +} + +#[cargo_test] +fn ignore_dotfile() { + let p = project() + .file("src/bin/.a.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn ignore_dotdirs() { + let p = project() + .file("src/bin/a.rs", "fn main() {}") + .file(".git/Cargo.toml", "") + .file(".pc/dummy-fix.patch/Cargo.toml", "") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn dotdir_root() { + let p = ProjectBuilder::new(root().join(".foo")) + .file("src/bin/a.rs", "fn main() {}") + .build(); + p.cargo("build").run(); +} + +#[cargo_test] +fn custom_target_dir_env() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); + + p.cargo("build").env("CARGO_TARGET_DIR", "foo/target").run(); + assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); + assert!(!p.root().join("target/debug").join(&exe_name).is_file()); + + p.cargo("build").run(); + assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("target/debug").join(&exe_name).is_file()); + + fs::create_dir(p.root().join(".cargo")).unwrap(); + File::create(p.root().join(".cargo/config")) + .unwrap() + .write_all( + br#" + [build] + target-dir = "foo/target" + "#, + ) + .unwrap(); + p.cargo("build").env("CARGO_TARGET_DIR", "bar/target").run(); + assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("target/debug").join(&exe_name).is_file()); +} + +#[cargo_test] +fn custom_target_dir_line_parameter() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); + + p.cargo("build --target-dir foo/target").run(); + assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); + assert!(!p.root().join("target/debug").join(&exe_name).is_file()); + + p.cargo("build").run(); + 
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("target/debug").join(&exe_name).is_file()); + + fs::create_dir(p.root().join(".cargo")).unwrap(); + File::create(p.root().join(".cargo/config")) + .unwrap() + .write_all( + br#" + [build] + target-dir = "foo/target" + "#, + ) + .unwrap(); + p.cargo("build --target-dir bar/target").run(); + assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("target/debug").join(&exe_name).is_file()); + + p.cargo("build --target-dir foobar/target") + .env("CARGO_TARGET_DIR", "bar/target") + .run(); + assert!(p + .root() + .join("foobar/target/debug") + .join(&exe_name) + .is_file()); + assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); + assert!(p.root().join("target/debug").join(&exe_name).is_file()); +} + +#[cargo_test] +fn build_multiple_packages() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [[bin]] + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("d1/Cargo.toml", &basic_bin_manifest("d1")) + .file("d1/src/lib.rs", "") + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [[bin]] + name = "d2" + doctest = false + "#, + ) + .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + p.cargo("build -p d1 -p d2 -p foo").run(); + + assert!(p.bin("foo").is_file()); + p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); + + let d1_path = &p + .build_dir() + .join("debug") + .join(format!("d1{}", env::consts::EXE_SUFFIX)); + let d2_path = &p + .build_dir() + .join("debug") + .join(format!("d2{}", env::consts::EXE_SUFFIX)); + + assert!(d1_path.is_file()); + p.process(d1_path).with_stdout("d1").run(); + + assert!(d2_path.is_file()); + p.process(d2_path).with_stdout("d2").run(); +} + +#[cargo_test] +fn invalid_spec() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + + [[bin]] + name = "foo" + "#, + ) + .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("d1/Cargo.toml", &basic_bin_manifest("d1")) + .file("d1/src/lib.rs", "") + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .build(); + + p.cargo("build -p notAValidDep") + .with_status(101) + .with_stderr("[ERROR] package ID specification `notAValidDep` matched no packages") + .run(); + + p.cargo("build -p d1 -p notAValidDep") + .with_status(101) + .with_stderr("[ERROR] package ID specification `notAValidDep` matched no packages") + .run(); +} + +#[cargo_test] +fn manifest_with_bom_is_ok() { + let p = project() + .file( + "Cargo.toml", + "\u{FEFF} + [package] + name = \"foo\" + version = \"0.0.1\" + authors = [] + ", + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn panic_abort_compiles_with_panic_abort() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v") + .with_stderr_contains("[..] 
-C panic=abort [..]") + .run(); +} + +#[cargo_test] +fn explicit_color_config_is_propagated_to_rustc() { + let p = project() + .file("Cargo.toml", &basic_manifest("test", "0.0.0")) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v --color always") + .with_stderr_contains("[..]rustc [..] src/lib.rs --color always[..]") + .run(); + + p.cargo("clean").run(); + + p.cargo("build -v --color never") + .with_stderr( + "\ +[COMPILING] test v0.0.0 ([..]) +[RUNNING] `rustc [..] --color never [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn compiler_json_error_format() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "build.rs", + "fn main() { println!(\"cargo:rustc-cfg=xyz\") }", + ) + .file("src/main.rs", "fn main() { let unused = 92; }") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("bar/src/lib.rs", r#"fn dead() {}"#) + .build(); + + // Use `jobs=1` to ensure that the order of messages is consistent. + p.cargo("build -v --message-format=json --jobs=1") + .with_json( + r#" + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["custom-build"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"build-script-build", + "src_path":"[..]build.rs" + }, + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "executable": null, + "features": [], + "filenames": "{...}", + "fresh": false + } + + { + "reason":"compiler-message", + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "doctest": true, + "edition": "2015", + "name":"bar", + "src_path":"[..]lib.rs" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "executable": null, + "features": [], + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "doctest": true, + "edition": "2015", + "name":"bar", + "src_path":"[..]lib.rs" + }, + "filenames":[ + "[..].rlib", + "[..].rmeta" + ], + "fresh": false + } + + { + "reason":"build-script-executed", + "package_id":"foo 0.5.0 ([..])", + "linked_libs":[], + "linked_paths":[], + "env":[], + "cfgs":["xyz"] + } + + { + "reason":"compiler-message", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"foo", + "src_path":"[..]main.rs" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"foo", + "src_path":"[..]main.rs" + }, + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "executable": "[..]/foo/target/debug/foo[EXE]", + "features": [], + "filenames": "{...}", + "fresh": false + } +"#, + ) + .run(); + + // With fresh build, we should repeat the artifacts, + // but omit compiler warnings. 
+ p.cargo("build -v --message-format=json --jobs=1") + .with_json( + r#" + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["custom-build"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"build-script-build", + "src_path":"[..]build.rs" + }, + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "executable": null, + "features": [], + "filenames": "{...}", + "fresh": true + } + + { + "reason":"compiler-artifact", + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "executable": null, + "features": [], + "package_id":"bar 0.5.0 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "doctest": true, + "edition": "2015", + "name":"bar", + "src_path":"[..]lib.rs" + }, + "filenames":[ + "[..].rlib", + "[..].rmeta" + ], + "fresh": true + } + + { + "reason":"build-script-executed", + "package_id":"foo 0.5.0 ([..])", + "linked_libs":[], + "linked_paths":[], + "env":[], + "cfgs":["xyz"] + } + + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"foo", + "src_path":"[..]main.rs" + }, + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "0", + "overflow_checks": true, + "test": false + }, + "executable": "[..]/foo/target/debug/foo[EXE]", + "features": [], + "filenames": "{...}", + "fresh": true + } +"#, + ) + .run(); +} + +#[cargo_test] +fn wrong_message_format_option() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --message-format XML") + .with_status(1) + .with_stderr_contains( + "\ +error: 'XML' isn't a valid value for '--message-format ' +[possible values: human, json, short] +", + ) + .run(); +} + +#[cargo_test] +fn message_format_json_forward_stderr() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() { let unused = 0; }") + .build(); + + p.cargo("rustc --release --bin foo --message-format JSON") + .with_json( + r#" + { + "reason":"compiler-message", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"foo", + "src_path":"[..]" + }, + "message":"{...}" + } + + { + "reason":"compiler-artifact", + "package_id":"foo 0.5.0 ([..])", + "target":{ + "kind":["bin"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"foo", + "src_path":"[..]" + }, + "profile":{ + "debug_assertions":false, + "debuginfo":null, + "opt_level":"3", + "overflow_checks": false, + "test":false + }, + "executable": "{...}", + "features":[], + "filenames": "{...}", + "fresh": false + } +"#, + ) + .run(); +} + +#[cargo_test] +fn no_warn_about_package_metadata() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [package.metadata] + foo = "bar" + a = true + b = 3 + + [package.metadata.another] + bar = 3 + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build") + .with_stderr( + "[..] 
foo v0.0.1 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn cargo_build_empty_target() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --target") + .arg("") + .with_status(101) + .with_stderr_contains("[..] target was empty") + .run(); +} + +#[cargo_test] +fn build_all_workspace() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build --all") + .with_stderr( + "[..] Compiling bar v0.1.0 ([..])\n\ + [..] Compiling foo v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn build_all_exclude() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") + .build(); + + p.cargo("build --all --exclude baz") + .with_stderr_contains("[..]Compiling foo v0.1.0 [..]") + .with_stderr_contains("[..]Compiling bar v0.1.0 [..]") + .with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]") + .run(); +} + +#[cargo_test] +fn build_all_workspace_implicit_examples() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("src/bin/b.rs", "fn main() {}") + .file("examples/c.rs", "fn main() {}") + .file("examples/d.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .file("bar/src/bin/e.rs", "fn main() {}") + .file("bar/src/bin/f.rs", "fn main() {}") + .file("bar/examples/g.rs", "fn main() {}") + .file("bar/examples/h.rs", "fn main() {}") + .build(); + + p.cargo("build --all --examples") + .with_stderr( + "[..] Compiling bar v0.1.0 ([..])\n\ + [..] Compiling foo v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); + assert!(!p.bin("a").is_file()); + assert!(!p.bin("b").is_file()); + assert!(p.bin("examples/c").is_file()); + assert!(p.bin("examples/d").is_file()); + assert!(!p.bin("e").is_file()); + assert!(!p.bin("f").is_file()); + assert!(p.bin("examples/g").is_file()); + assert!(p.bin("examples/h").is_file()); +} + +#[cargo_test] +fn build_all_virtual_manifest() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + // The order in which bar and baz are built is not guaranteed + p.cargo("build --all") + .with_stderr_contains("[..] Compiling baz v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] 
Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn build_virtual_manifest_all_implied() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + // The order in which `bar` and `baz` are built is not guaranteed. + p.cargo("build") + .with_stderr_contains("[..] Compiling baz v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn build_virtual_manifest_one_project() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("build -p bar") + .with_stderr_does_not_contain("[..]baz[..]") + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn build_all_virtual_manifest_implicit_examples() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .file("bar/src/bin/a.rs", "fn main() {}") + .file("bar/src/bin/b.rs", "fn main() {}") + .file("bar/examples/c.rs", "fn main() {}") + .file("bar/examples/d.rs", "fn main() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "") + .file("baz/src/bin/e.rs", "fn main() {}") + .file("baz/src/bin/f.rs", "fn main() {}") + .file("baz/examples/g.rs", "fn main() {}") + .file("baz/examples/h.rs", "fn main() {}") + .build(); + + // The order in which bar and baz are built is not guaranteed + p.cargo("build --all --examples") + .with_stderr_contains("[..] Compiling baz v0.1.0 ([..])") + .with_stderr_contains("[..] Compiling bar v0.1.0 ([..])") + .with_stderr( + "[..] Compiling [..] v0.1.0 ([..])\n\ + [..] Compiling [..] v0.1.0 ([..])\n\ + [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); + assert!(!p.bin("a").is_file()); + assert!(!p.bin("b").is_file()); + assert!(p.bin("examples/c").is_file()); + assert!(p.bin("examples/d").is_file()); + assert!(!p.bin("e").is_file()); + assert!(!p.bin("f").is_file()); + assert!(p.bin("examples/g").is_file()); + assert!(p.bin("examples/h").is_file()); +} + +#[cargo_test] +fn build_all_member_dependency_same_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#, + ) + .file("a/src/lib.rs", "pub fn a() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + + p.cargo("build --all") + .with_stderr( + "[UPDATING] `[..]` index\n\ + [DOWNLOADING] crates ...\n\ + [DOWNLOADED] a v0.1.0 ([..])\n\ + [COMPILING] a v0.1.0\n\ + [COMPILING] a v0.1.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn run_proper_binary() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "main" + [[bin]] + name = "other" + "#, + ) + .file("src/lib.rs", "") + .file( + "src/bin/main.rs", + r#"fn main() { panic!("This should never be run."); }"#, + ) + .file("src/bin/other.rs", "fn main() {}") + .build(); + + p.cargo("run --bin other").run(); +} + +#[cargo_test] +fn run_proper_binary_main_rs() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/lib.rs", "") + .file("src/bin/main.rs", "fn main() {}") + .build(); + + p.cargo("run --bin foo").run(); +} + +#[cargo_test] +fn run_proper_alias_binary_from_src() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + [[bin]] + name = "bar" + "#, + ) + .file("src/foo.rs", r#"fn main() { println!("foo"); }"#) + .file("src/bar.rs", r#"fn main() { println!("bar"); }"#) + .build(); + + p.cargo("build --all").run(); + p.process(&p.bin("foo")).with_stdout("foo\n").run(); + p.process(&p.bin("bar")).with_stdout("bar\n").run(); +} + +#[cargo_test] +fn run_proper_alias_binary_main_rs() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + [[bin]] + name = "foo" + [[bin]] + name = "bar" + "#, + ) + .file("src/main.rs", r#"fn main() { println!("main"); }"#) + .build(); + + p.cargo("build --all").run(); + p.process(&p.bin("foo")).with_stdout("main\n").run(); + p.process(&p.bin("bar")).with_stdout("main\n").run(); +} + +#[cargo_test] +fn run_proper_binary_main_rs_as_foo() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/foo.rs", + r#" fn main() { panic!("This should never be run."); }"#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("run --bin foo").run(); +} + +#[cargo_test] +// NOTE: we don't have `/usr/bin/env` on Windows. 
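+// `/usr/bin/env` simply execs its arguments, so it acts as a pass-through
+// wrapper that still exercises the RUSTC_WRAPPER invocation path.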
+#[cfg(not(windows))] +fn rustc_wrapper() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("build -v") + .env("RUSTC_WRAPPER", "/usr/bin/env") + .with_stderr_contains("[RUNNING] `/usr/bin/env rustc --crate-name foo [..]") + .run(); +} + +#[cargo_test] +#[cfg(not(windows))] +fn rustc_wrapper_relative() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("build -v") + .env("RUSTC_WRAPPER", "./sccache") + .with_status(101) + .with_stderr_contains("[..]/foo/./sccache rustc[..]") + .run(); +} + +#[cargo_test] +#[cfg(not(windows))] +fn rustc_wrapper_from_path() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("build -v") + .env("RUSTC_WRAPPER", "wannabe_sccache") + .with_status(101) + .with_stderr_contains("[..]`wannabe_sccache rustc [..]") + .run(); +} + +#[cargo_test] +fn cdylib_not_lifted() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [lib] + crate-type = ["cdylib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + let files = if cfg!(windows) { + vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"] + } else if cfg!(target_os = "macos") { + vec!["libfoo.dylib"] + } else { + vec!["libfoo.so"] + }; + + for file in files { + println!("checking: {}", file); + assert!(p.root().join("target/debug/deps").join(&file).is_file()); + } +} + +#[cargo_test] +fn cdylib_final_outputs() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo-bar" + authors = [] + version = "0.1.0" + + [lib] + crate-type = ["cdylib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + let files = if cfg!(windows) { + vec!["foo_bar.dll.lib", "foo_bar.dll"] + } else if cfg!(target_os = "macos") { + vec!["libfoo_bar.dylib"] + } else { + vec!["libfoo_bar.so"] + }; + + for file in files { + println!("checking: {}", file); + assert!(p.root().join("target/debug").join(&file).is_file()); + } +} + +#[cargo_test] +fn deterministic_cfg_flags() { + // This bug is non-deterministic. + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + build = "build.rs" + + [features] + default = ["f_a", "f_b", "f_c", "f_d"] + f_a = [] + f_b = [] + f_c = [] + f_d = [] + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=cfg_a"); + println!("cargo:rustc-cfg=cfg_b"); + println!("cargo:rustc-cfg=cfg_c"); + println!("cargo:rustc-cfg=cfg_d"); + println!("cargo:rustc-cfg=cfg_e"); + } + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.1.0 [..] +[RUNNING] [..] +[RUNNING] [..] +[RUNNING] `rustc --crate-name foo [..] \ +--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\ +--cfg[..]f_c[..]--cfg[..]f_d[..] 
\ +--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn explicit_bins_without_paths() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "foo" + + [[bin]] + name = "bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn no_bin_in_src_with_lib() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/lib.rs", "") + .file("src/foo.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + can't find `foo` bin, specify bin.path", + ) + .run(); +} + +#[cargo_test] +fn inferred_bins() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .file("src/bin/baz/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("bar").is_file()); + assert!(p.bin("baz").is_file()); +} + +#[cargo_test] +fn inferred_bins_duplicate_name() { + // this should fail, because we have two binaries with the same name + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .file("src/bin/bar/main.rs", "fn main() {}") + .build(); + + p.cargo("build").with_status(101).with_stderr_contains( + "[..]found duplicate binary name bar, but all binary targets must have a unique name[..]", + ) + .run(); +} + +#[cargo_test] +fn inferred_bin_path() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "bar" + # Note, no `path` key! + "#, + ) + .file("src/bin/bar/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); + assert!(p.bin("bar").is_file()); +} + +#[cargo_test] +fn inferred_examples() { + let p = project() + .file("src/lib.rs", "fn main() {}") + .file("examples/bar.rs", "fn main() {}") + .file("examples/baz/main.rs", "fn main() {}") + .build(); + + p.cargo("build --examples").run(); + assert!(p.bin("examples/bar").is_file()); + assert!(p.bin("examples/baz").is_file()); +} + +#[cargo_test] +fn inferred_tests() { + let p = project() + .file("src/lib.rs", "fn main() {}") + .file("tests/bar.rs", "fn main() {}") + .file("tests/baz/main.rs", "fn main() {}") + .build(); + + p.cargo("test --test=bar --test=baz").run(); +} + +#[cargo_test] +fn inferred_benchmarks() { + let p = project() + .file("src/lib.rs", "fn main() {}") + .file("benches/bar.rs", "fn main() {}") + .file("benches/baz/main.rs", "fn main() {}") + .build(); + + p.cargo("bench --bench=bar --bench=baz").run(); +} + +#[cargo_test] +fn target_edition() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + edition = "2018" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + // Passes on nightly, fails on stable, since `--edition` is nightly-only. + .without_status() + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..]--edition=2018 [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn target_edition_override() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + edition = "2018" + + [lib] + edition = "2015" + "#, + ) + .file( + "src/lib.rs", + " + pub fn async() {} + pub fn try() {} + pub fn await() {} + ", + ) + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn same_metadata_different_directory() { + // A top-level crate built in two different workspaces should have the + // same metadata hash. + let p = project() + .at("foo1") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + let output = t!(String::from_utf8( + t!(p.cargo("build -v").exec_with_output()).stderr, + )); + let metadata = output + .split_whitespace() + .find(|arg| arg.starts_with("metadata=")) + .unwrap(); + + let p = project() + .at("foo2") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build -v") + .with_stderr_contains(format!("[..]{}[..]", metadata)) + .run(); +} + +#[cargo_test] +fn building_a_dependent_crate_witout_bin_should_fail() { + Package::new("testless", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "testless" + version = "0.1.0" + + [[bin]] + name = "a_bin" + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + testless = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]can't find `a_bin` bin, specify bin.path") + .run(); +} + +#[cargo_test] +#[cfg(any(target_os = "macos", target_os = "ios"))] +fn uplift_dsym_of_bin_on_mac() { + let p = project() + .file("src/main.rs", "fn main() { panic!(); }") + .file("src/bin/b.rs", "fn main() { panic!(); }") + .file("examples/c.rs", "fn main() { panic!(); }") + .file("tests/d.rs", "fn main() { panic!(); }") + .build(); + + p.cargo("build --bins --examples --tests").run(); + assert!(p.target_debug_dir().join("foo.dSYM").is_dir()); + assert!(p.target_debug_dir().join("b.dSYM").is_dir()); + assert!(p.target_debug_dir().join("b.dSYM").is_symlink()); + assert!(p.target_debug_dir().join("examples/c.dSYM").is_symlink()); + assert!(!p.target_debug_dir().join("c.dSYM").exists()); + assert!(!p.target_debug_dir().join("d.dSYM").exists()); +} + +#[cargo_test] +#[cfg(all(target_os = "windows", target_env = "msvc"))] +fn uplift_pdb_of_bin_on_windows() { + let p = project() + .file("src/main.rs", "fn main() { panic!(); }") + .file("src/bin/b.rs", "fn main() { panic!(); }") + .file("examples/c.rs", "fn main() { panic!(); }") + .file("tests/d.rs", "fn main() { panic!(); }") + .build(); + + p.cargo("build --bins --examples --tests").run(); + assert!(p.target_debug_dir().join("foo.pdb").is_file()); + assert!(p.target_debug_dir().join("b.pdb").is_file()); + assert!(!p.target_debug_dir().join("examples/c.pdb").exists()); + assert_eq!(p.glob("target/debug/examples/c-*.pdb").count(), 1); + assert!(!p.target_debug_dir().join("c.pdb").exists()); + assert!(!p.target_debug_dir().join("d.pdb").exists()); +} + +// Ensure that `cargo build` chooses the correct profile for building +// targets based on filters (assuming `--profile` is not specified). 
+#[cargo_test] +fn build_filter_infer_profile() { + let p = project() + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/t1.rs", "") + .file("benches/b1.rs", "") + .file("examples/ex1.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link[..]", + ) + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \ + --emit=[..]link[..]", + ) + .run(); + + p.root().join("target").rm_rf(); + p.cargo("build -v --test=t1") + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 [..]", + ) + .with_stderr_contains( + "[RUNNING] `rustc --crate-name t1 tests/t1.rs --color never --emit=[..]link \ + -C debuginfo=2 [..]", + ) + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \ + --emit=[..]link -C debuginfo=2 [..]", + ) + .run(); + + p.root().join("target").rm_rf(); + // Bench uses test profile without `--release`. + p.cargo("build -v --bench=b1") + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 [..]", + ) + .with_stderr_contains( + "[RUNNING] `rustc --crate-name b1 benches/b1.rs --color never --emit=[..]link \ + -C debuginfo=2 [..]", + ) + .with_stderr_does_not_contain("opt-level") + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \ + --emit=[..]link -C debuginfo=2 [..]", + ) + .run(); +} + +#[cargo_test] +fn targets_selected_default() { + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("build -v") + // Binaries. + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \ + --emit=[..]link[..]", + ) + // Benchmarks. + .with_stderr_does_not_contain( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=[..]link \ + -C opt-level=3 --test [..]", + ) + // Unit tests. + .with_stderr_does_not_contain( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=[..]link \ + -C debuginfo=2 --test [..]", + ) + .run(); +} + +#[cargo_test] +fn targets_selected_all() { + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("build -v --all-targets") + // Binaries. + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \ + --emit=[..]link[..]", + ) + // Unit tests. + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=[..]link \ + -C debuginfo=2 --test [..]", + ) + .run(); +} + +#[cargo_test] +fn all_targets_no_lib() { + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("build -v --all-targets") + // Binaries. + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \ + --emit=[..]link[..]", + ) + // Unit tests. + .with_stderr_contains( + "[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=[..]link \ + -C debuginfo=2 --test [..]", + ) + .run(); +} + +#[cargo_test] +fn no_linkable_target() { + // Issue 3169: this is currently not an error as per discussion in PR #4797. 
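+ // `the_lib` below only builds a staticlib, which provides nothing rustc
+ // can link as a Rust library, so Cargo warns instead of failing the build.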
+ let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + [dependencies] + the_lib = { path = "the_lib" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "the_lib/Cargo.toml", + r#" + [package] + name = "the_lib" + version = "0.1.0" + [lib] + name = "the_lib" + crate-type = ["staticlib"] + "#, + ) + .file("the_lib/src/lib.rs", "pub fn foo() {}") + .build(); + p.cargo("build") + .with_stderr_contains( + "[WARNING] The package `the_lib` provides no linkable [..] \ + while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]", + ) + .run(); +} + +#[cargo_test] +fn avoid_dev_deps() { + Package::new("foo", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dev-dependencies] + baz = "1.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] +[ERROR] no matching package named `baz` found +location searched: registry `https://github.com/rust-lang/crates.io-index` +required by package `bar v0.1.0 ([..]/foo)` +", + ) + .run(); + p.cargo("build -Zavoid-dev-deps") + .masquerade_as_nightly_cargo() + .run(); +} + +#[cargo_test] +fn invalid_jobs() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build --jobs over9000") + .with_status(1) + .with_stderr("error: Invalid value: could not parse `over9000` as a number") + .run(); +} + +#[cargo_test] +fn target_filters_workspace() { + let ws = project() + .at("ws") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_lib_manifest("a")) + .file("a/src/lib.rs", "") + .file("a/examples/ex1.rs", "fn main() {}") + .file("b/Cargo.toml", &basic_bin_manifest("b")) + .file("b/src/lib.rs", "") + .file("b/src/main.rs", "fn main() {}") + .build(); + + ws.cargo("build -v --example ex") + .with_status(101) + .with_stderr( + "\ +[ERROR] no example target named `ex` + +Did you mean `ex1`?", + ) + .run(); + + ws.cargo("build -v --lib") + .with_stderr_contains("[RUNNING] `rustc [..]a/src/lib.rs[..]") + .with_stderr_contains("[RUNNING] `rustc [..]b/src/lib.rs[..]") + .run(); + + ws.cargo("build -v --example ex1") + .with_stderr_contains("[RUNNING] `rustc [..]a/examples/ex1.rs[..]") + .run(); +} + +#[cargo_test] +fn target_filters_workspace_not_found() { + let ws = project() + .at("ws") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_bin_manifest("a")) + .file("a/src/main.rs", "fn main() {}") + .file("b/Cargo.toml", &basic_bin_manifest("b")) + .file("b/src/main.rs", "fn main() {}") + .build(); + + ws.cargo("build -v --lib") + .with_status(101) + .with_stderr("[ERROR] no library targets found in packages: a, b") + .run(); +} + +#[cfg(unix)] +#[cargo_test] +fn signal_display() { + // Cause the compiler to crash with a signal. 
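+ // The proc-macro below calls `std::process::abort()`, so rustc dies with
+ // SIGABRT (signal 6) and the error output must name the signal.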
+ let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [dependencies] + pm = { path = "pm" } + "#, + ) + .file( + "src/lib.rs", + r#" + #[macro_use] + extern crate pm; + + #[derive(Foo)] + pub struct S; + "#, + ) + .file( + "pm/Cargo.toml", + r#" + [package] + name = "pm" + version = "0.1.0" + [lib] + proc-macro = true + "#, + ) + .file( + "pm/src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Foo)] + pub fn derive(_input: TokenStream) -> TokenStream { + std::process::abort() + } + "#, + ) + .build(); + + foo.cargo("build") + .with_stderr( + "\ +[COMPILING] pm [..] +[COMPILING] foo [..] +[ERROR] Could not compile `foo`. + +Caused by: + process didn't exit successfully: `rustc [..]` (signal: 6, SIGABRT: process abort signal) +", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn tricky_pipelining() { + if !crate::support::is_nightly() { + return; + } + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + foo.cargo("build -p bar") + .env("CARGO_BUILD_PIPELINING", "true") + .run(); + foo.cargo("build -p foo") + .env("CARGO_BUILD_PIPELINING", "true") + .run(); +} + +#[cargo_test] +fn pipelining_works() { + if !crate::support::is_nightly() { + return; + } + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + foo.cargo("build") + .env("CARGO_BUILD_PIPELINING", "true") + .with_stdout("") + .with_stderr( + "\ +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn pipelining_big_graph() { + if !crate::support::is_nightly() { + return; + } + + // Create a crate graph of the form {a,b}{0..29}, where {a,b}(n) depend on {a,b}(n+1) + // Then have `foo`, a binary crate, depend on the whole thing. 
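+ // `a30` fails via `compile_error!`, so the error must propagate up through
+ // the pipelined graph and fail the overall build.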
+ let mut project = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [dependencies] + a1 = { path = "a1" } + b1 = { path = "b1" } + "#, + ) + .file("src/main.rs", "fn main(){}"); + + for n in 0..30 { + for x in &["a", "b"] { + project = project + .file( + &format!("{x}{n}/Cargo.toml", x = x, n = n), + &format!( + r#" + [package] + name = "{x}{n}" + version = "0.1.0" + [dependencies] + a{np1} = {{ path = "../a{np1}" }} + b{np1} = {{ path = "../b{np1}" }} + "#, + x = x, + n = n, + np1 = n + 1 + ), + ) + .file(&format!("{x}{n}/src/lib.rs", x = x, n = n), ""); + } + } + + let foo = project + .file("a30/Cargo.toml", &basic_lib_manifest("a30")) + .file( + "a30/src/lib.rs", + r#"compile_error!("don't actually build me");"#, + ) + .file("b30/Cargo.toml", &basic_lib_manifest("b30")) + .file("b30/src/lib.rs", "") + .build(); + foo.cargo("build -p foo") + .env("CARGO_BUILD_PIPELINING", "true") + .with_status(101) + .with_stderr_contains("[ERROR] Could not compile `a30`[..]") + .run(); +} + +#[cargo_test] +fn forward_rustc_output() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + edition = '2018' + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "bar::foo!();") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [lib] + proc-macro = true + "#, + ) + .file( + "bar/src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::*; + + #[proc_macro] + pub fn foo(input: TokenStream) -> TokenStream { + println!("a"); + println!("b"); + println!("{{}}"); + eprintln!("c"); + eprintln!("d"); + eprintln!("{{a"); // "malformed json" + input + } + "#, + ) + .build(); + + foo.cargo("build") + .with_stdout("a\nb\n{}") + .with_stderr( + "\ +[COMPILING] [..] +[COMPILING] [..] +c +d +{a +[FINISHED] [..] +", + ) + .run(); +} diff --git a/tests/testsuite/build_auth.rs b/tests/testsuite/build_auth.rs new file mode 100644 index 00000000000..2509f90a32f --- /dev/null +++ b/tests/testsuite/build_auth.rs @@ -0,0 +1,263 @@ +use std::collections::HashSet; +use std::io::prelude::*; +use std::io::BufReader; +use std::net::TcpListener; +use std::sync::atomic::{AtomicUsize, Ordering::SeqCst}; +use std::sync::Arc; +use std::thread; + +use crate::support::paths; +use crate::support::{basic_manifest, project}; +use git2; + +// Tests that HTTP auth is offered from `credential.helper`. 
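+// The fake server replies 401 twice; the second request must carry
+// `Authorization: Basic Zm9vOmJhcg==` ("foo:bar"), as produced by the
+// credential-helper script that the test installs in `.gitconfig`.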
+#[cargo_test]
+fn http_auth_offered() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+
+ fn headers(rdr: &mut dyn BufRead) -> HashSet<String> {
+ let valid = ["GET", "Authorization", "Accept"];
+ rdr.lines()
+ .map(|s| s.unwrap())
+ .take_while(|s| s.len() > 2)
+ .map(|s| s.trim().to_string())
+ .filter(|s| valid.iter().any(|prefix| s.starts_with(*prefix)))
+ .collect()
+ }
+
+ let connections = Arc::new(AtomicUsize::new(0));
+ let connections2 = connections.clone();
+ let t = thread::spawn(move || {
+ let mut conn = BufReader::new(server.accept().unwrap().0);
+ let req = headers(&mut conn);
+ connections2.fetch_add(1, SeqCst);
+ conn.get_mut()
+ .write_all(
+ b"HTTP/1.1 401 Unauthorized\r\n\
+ WWW-Authenticate: Basic realm=\"wheee\"\r\n\
+ Content-Length: 0\r\n\
+ \r\n",
+ )
+ .unwrap();
+ assert_eq!(
+ req,
+ vec![
+ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+ "Accept: */*",
+ ]
+ .into_iter()
+ .map(|s| s.to_string())
+ .collect()
+ );
+
+ let req = headers(&mut conn);
+ connections2.fetch_add(1, SeqCst);
+ conn.get_mut()
+ .write_all(
+ b"HTTP/1.1 401 Unauthorized\r\n\
+ WWW-Authenticate: Basic realm=\"wheee\"\r\n\
+ \r\n",
+ )
+ .unwrap();
+ assert_eq!(
+ req,
+ vec![
+ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1",
+ "Authorization: Basic Zm9vOmJhcg==",
+ "Accept: */*",
+ ]
+ .into_iter()
+ .map(|s| s.to_string())
+ .collect()
+ );
+ });
+
+ let script = project()
+ .at("script")
+ .file("Cargo.toml", &basic_manifest("script", "0.1.0"))
+ .file(
+ "src/main.rs",
+ r#"
+ fn main() {
+ println!("username=foo");
+ println!("password=bar");
+ }
+ "#,
+ )
+ .build();
+
+ script.cargo("build -v").run();
+ let script = script.bin("script");
+
+ let config = paths::home().join(".gitconfig");
+ let mut config = git2::Config::open(&config).unwrap();
+ config
+ .set_str(
+ "credential.helper",
+ // This is a bash script so replace `\` with `/` for Windows
+ &script.display().to_string().replace("\\", "/"),
+ )
+ .unwrap();
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "http://127.0.0.1:{}/foo/bar"
+ "#,
+ addr.port()
+ ),
+ )
+ .file("src/main.rs", "")
+ .file(
+ ".cargo/config",
+ "[net]
+ retry = 0
+ ",
+ )
+ .build();
+
+ // This is a "contains" check because the last error differs by platform,
+ // may span multiple lines, and isn't relevant to this test.
+ p.cargo("build")
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "\
+[UPDATING] git repository `http://{addr}/foo/bar`
+[ERROR] failed to load source for a dependency on `bar`
+
+Caused by:
+ Unable to update http://{addr}/foo/bar
+
+Caused by:
+ failed to clone into: [..]
+
+Caused by:
+ failed to authenticate when downloading repository
+attempted to find username/password via `credential.helper`, but [..]
+
+Caused by:
+",
+ addr = addr
+ ))
+ .run();
+
+ assert_eq!(connections.load(SeqCst), 2);
+ t.join().ok().unwrap();
+}
+
+// Boy, sure would be nice to have a TLS implementation in rust!
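+// The server below writes raw bytes instead of performing a TLS handshake,
+// so the fetch must fail with an error from the SSL layer.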
+#[cargo_test]
+fn https_something_happens() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+ let t = thread::spawn(move || {
+ let mut conn = server.accept().unwrap().0;
+ drop(conn.write(b"1234"));
+ drop(conn.shutdown(std::net::Shutdown::Write));
+ drop(conn.read(&mut [0; 16]));
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "https://127.0.0.1:{}/foo/bar"
+ "#,
+ addr.port()
+ ),
+ )
+ .file("src/main.rs", "")
+ .file(
+ ".cargo/config",
+ "[net]
+ retry = 0
+ ",
+ )
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "[UPDATING] git repository `https://{addr}/foo/bar`",
+ addr = addr
+ ))
+ .with_stderr_contains(&format!(
+ "\
+Caused by:
+ {errmsg}
+",
+ errmsg = if cfg!(windows) {
+ "[..]failed to send request: [..]"
+ } else if cfg!(target_os = "macos") {
+ // macOS is difficult to test, as some builds may use Security.framework
+ // while others may use OpenSSL. In that case, let's just not verify the error
+ // message here.
+ "[..]"
+ } else {
+ "[..]SSL error: [..]"
+ }
+ ))
+ .run();
+
+ t.join().ok().unwrap();
+}
+
+// It would sure be nice to have an SSH implementation in Rust!
+#[cargo_test]
+fn ssh_something_happens() {
+ let server = TcpListener::bind("127.0.0.1:0").unwrap();
+ let addr = server.local_addr().unwrap();
+ let t = thread::spawn(move || {
+ drop(server.accept().unwrap());
+ });
+
+ let p = project()
+ .file(
+ "Cargo.toml",
+ &format!(
+ r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [dependencies.bar]
+ git = "ssh://127.0.0.1:{}/foo/bar"
+ "#,
+ addr.port()
+ ),
+ )
+ .file("src/main.rs", "")
+ .build();
+
+ p.cargo("build -v")
+ .with_status(101)
+ .with_stderr_contains(&format!(
+ "[UPDATING] git repository `ssh://{addr}/foo/bar`",
+ addr = addr
+ ))
+ .with_stderr_contains(
+ "\
+Caused by:
+ [..]failed to start SSH session: Failed getting banner[..]
+",
+ )
+ .run();
+ t.join().ok().unwrap();
+}
diff --git a/tests/testsuite/build_lib.rs b/tests/testsuite/build_lib.rs
new file mode 100644
index 00000000000..f93187d535d
--- /dev/null
+++ b/tests/testsuite/build_lib.rs
@@ -0,0 +1,63 @@
+use crate::support::{basic_bin_manifest, basic_manifest, project};
+
+#[cargo_test]
+fn build_lib_only() {
+ let p = project()
+ .file("src/main.rs", "fn main() {}")
+ .file("src/lib.rs", r#" "#)
+ .build();
+
+ p.cargo("build --lib -v")
+ .with_stderr(
+ "\
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
+ --emit=[..]link -C debuginfo=2 \
+ -C metadata=[..] \
+ --out-dir [..] 
\ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn build_with_no_lib() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --lib") + .with_status(101) + .with_stderr("[ERROR] no library targets found in package `foo`") + .run(); +} + +#[cargo_test] +fn build_with_relative_cargo_home_path() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["wycats@example.com"] + + [dependencies] + + "test-dependency" = { path = "src/test_dependency" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("src/test_dependency/src/lib.rs", r#" "#) + .file( + "src/test_dependency/Cargo.toml", + &basic_manifest("test-dependency", "0.0.1"), + ) + .build(); + + p.cargo("build").env("CARGO_HOME", "./cargo_home/").run(); +} diff --git a/tests/testsuite/build_plan.rs b/tests/testsuite/build_plan.rs new file mode 100644 index 00000000000..f23078e4ba3 --- /dev/null +++ b/tests/testsuite/build_plan.rs @@ -0,0 +1,222 @@ +use crate::support::registry::Package; +use crate::support::{basic_bin_manifest, basic_manifest, main_file, project}; + +#[cargo_test] +fn cargo_build_plan_simple() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build --build-plan -Zunstable-options") + .masquerade_as_nightly_cargo() + .with_json( + r#" + { + "inputs": [ + "[..]/foo/Cargo.toml" + ], + "invocations": [ + { + "args": "{...}", + "cwd": "[..]/cit/[..]/foo", + "deps": [], + "env": "{...}", + "kind": "Host", + "links": "{...}", + "outputs": "{...}", + "package_name": "foo", + "package_version": "0.5.0", + "program": "rustc", + "target_kind": ["bin"], + "compile_mode": "build" + } + ] + } + "#, + ) + .run(); + assert!(!p.bin("foo").is_file()); +} + +#[cargo_test] +fn cargo_build_plan_single_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.5.0" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn foo() { bar::bar(); } + + #[test] + fn test() { foo(); } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + p.cargo("build --build-plan -Zunstable-options") + .masquerade_as_nightly_cargo() + .with_json( + r#" + { + "inputs": [ + "[..]/foo/Cargo.toml", + "[..]/foo/bar/Cargo.toml" + ], + "invocations": [ + { + "args": "{...}", + "cwd": "[..]/cit/[..]/foo", + "deps": [], + "env": "{...}", + "kind": "Host", + "links": "{...}", + "outputs": [ + "[..]/foo/target/debug/deps/libbar-[..].rlib", + "[..]/foo/target/debug/deps/libbar-[..].rmeta" + ], + "package_name": "bar", + "package_version": "0.0.1", + "program": "rustc", + "target_kind": ["lib"], + "compile_mode": "build" + }, + { + "args": "{...}", + "cwd": "[..]/cit/[..]/foo", + "deps": [0], + "env": "{...}", + "kind": "Host", + "links": "{...}", + "outputs": [ + "[..]/foo/target/debug/deps/libfoo-[..].rlib", + "[..]/foo/target/debug/deps/libfoo-[..].rmeta" + ], + "package_name": "foo", + "package_version": "0.5.0", + "program": "rustc", + "target_kind": ["lib"], + "compile_mode": "build" + } + ] + } + "#, + ) + .run(); +} + +#[cargo_test] +fn cargo_build_plan_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" 
+ version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#, + ) + .file("src/main.rs", r#"fn main() {}"#) + .file("build.rs", r#"fn main() {}"#) + .build(); + + p.cargo("build --build-plan -Zunstable-options") + .masquerade_as_nightly_cargo() + .with_json( + r#" + { + "inputs": [ + "[..]/foo/Cargo.toml" + ], + "invocations": [ + { + "args": "{...}", + "cwd": "[..]/cit/[..]/foo", + "deps": [], + "env": "{...}", + "kind": "Host", + "links": "{...}", + "outputs": [ + "[..]/foo/target/debug/build/[..]/build_script_build-[..]" + ], + "package_name": "foo", + "package_version": "0.5.0", + "program": "rustc", + "target_kind": ["custom-build"], + "compile_mode": "build" + }, + { + "args": "{...}", + "cwd": "[..]/cit/[..]/foo", + "deps": [0], + "env": "{...}", + "kind": "Host", + "links": "{...}", + "outputs": [], + "package_name": "foo", + "package_version": "0.5.0", + "program": "[..]/build-script-build", + "target_kind": ["custom-build"], + "compile_mode": "run-custom-build" + }, + { + "args": "{...}", + "cwd": "[..]/cit/[..]/foo", + "deps": [1], + "env": "{...}", + "kind": "Host", + "links": "{...}", + "outputs": "{...}", + "package_name": "foo", + "package_version": "0.5.0", + "program": "rustc", + "target_kind": ["bin"], + "compile_mode": "build" + } + ] + } + "#, + ) + .run(); +} + +#[cargo_test] +fn build_plan_with_dev_dep() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build --build-plan -Zunstable-options") + .masquerade_as_nightly_cargo() + .run(); +} diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs new file mode 100644 index 00000000000..0952033bacc --- /dev/null +++ b/tests/testsuite/build_script.rs @@ -0,0 +1,3837 @@ +use std::env; +use std::fs::{self, File}; +use std::io; +use std::io::prelude::*; +use std::thread; + +use crate::support::paths::CargoPathExt; +use crate::support::registry::Package; +use crate::support::{basic_manifest, cross_compile, project}; +use crate::support::{rustc_host, sleep_ms, slow_cpu_multiplier}; +use cargo::util::paths::remove_dir_all; + +#[cargo_test] +fn custom_build_script_failed() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("build.rs", "fn main() { std::process::exit(101); }") + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc --crate-name build_script_build build.rs --color never --crate-type bin [..]` +[RUNNING] `[..]/build-script-build` +[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])` + +Caused by: + process didn't exit successfully: `[..]/build-script-build` (exit code: 101)", + ) + .run(); +} + +#[cargo_test] +fn custom_build_env_vars() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [features] + bar_feat = ["bar/foo"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [features] + foo = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn hello() {}"); + + let 
file_content = format!( + r#" + use std::env; + use std::io::prelude::*; + use std::path::Path; + use std::fs; + + fn main() {{ + let _target = env::var("TARGET").unwrap(); + let _ncpus = env::var("NUM_JOBS").unwrap(); + let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + + let opt = env::var("OPT_LEVEL").unwrap(); + assert_eq!(opt, "0"); + + let opt = env::var("PROFILE").unwrap(); + assert_eq!(opt, "debug"); + + let debug = env::var("DEBUG").unwrap(); + assert_eq!(debug, "true"); + + let out = env::var("OUT_DIR").unwrap(); + assert!(out.starts_with(r"{0}")); + assert!(fs::metadata(&out).map(|m| m.is_dir()).unwrap_or(false)); + + let _host = env::var("HOST").unwrap(); + + let _feat = env::var("CARGO_FEATURE_FOO").unwrap(); + + let _cargo = env::var("CARGO").unwrap(); + + let rustc = env::var("RUSTC").unwrap(); + assert_eq!(rustc, "rustc"); + + let rustdoc = env::var("RUSTDOC").unwrap(); + assert_eq!(rustdoc, "rustdoc"); + + assert!(env::var("RUSTC_LINKER").is_err()); + }} + "#, + p.root() + .join("target") + .join("debug") + .join("build") + .display() + ); + + let p = p.file("bar/build.rs", &file_content).build(); + + p.cargo("build --features bar_feat").run(); +} + +#[cargo_test] +fn custom_build_env_var_rustc_linker() { + if cross_compile::disabled() { + return; + } + let target = cross_compile::alternate(); + let p = project() + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + linker = "/path/to/linker" + "#, + target + ), + ) + .file( + "build.rs", + r#" + use std::env; + + fn main() { + assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); + } + "#, + ) + .file("src/lib.rs", "") + .build(); + + // no crate type set => linker never called => build succeeds if and + // only if build.rs succeeds, despite linker binary not existing. + p.cargo("build --target").arg(&target).run(); +} + +#[cargo_test] +fn custom_build_script_wrong_rustc_flags() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "build.rs", + r#"fn main() { println!("cargo:rustc-flags=-aaa -bbb"); }"#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ([CWD])`: \ + `-aaa -bbb`", + ) + .run(); +} + +/* +#[cargo_test] +fn custom_build_script_rustc_flags() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.foo] + path = "foo" + "#, + ).file("src/main.rs", "fn main() {}") + .file( + "foo/Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + "#, + ).file("foo/src/lib.rs", "") + .file( + "foo/build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); + } + "#, + ).build(); + + // TODO: TEST FAILS BECAUSE OF WRONG STDOUT (but otherwise, the build works). + p.cargo("build --verbose") + .with_status(101) + .with_stderr( + "\ +[COMPILING] bar v0.5.0 ([CWD]) +[RUNNING] `rustc --crate-name test [CWD]/src/lib.rs --crate-type lib -C debuginfo=2 \ + -C metadata=[..] \ + -C extra-filename=-[..] 
\ + --out-dir [CWD]/target \ + --emit=[..]link \ + -L [CWD]/target \ + -L [CWD]/target/deps` +", + ).run(); +} +*/ + +#[cargo_test] +fn links_no_build_cmd() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \ +not have a custom build script +", + ) + .run(); +} + +#[cargo_test] +fn links_duplicates() { + // this tests that the links_duplicates are caught at resolver time + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a-sys] + path = "a-sys" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "") + .file( + "a-sys/Cargo.toml", + r#" + [project] + name = "a-sys" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file("a-sys/src/lib.rs", "") + .file("a-sys/build.rs", "") + .build(); + + p.cargo("build").with_status(101) + .with_stderr("\ +error: failed to select a version for `a-sys`. + ... required by package `foo v0.5.0 ([..])` +versions that meet the requirements `*` are: 0.5.0 + +the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: +package `foo v0.5.0 ([..])` + +failed to select a version for `a-sys` which could resolve this conflict +").run(); +} + +#[cargo_test] +fn links_duplicates_deep_dependency() { + // this tests that the links_duplicates are caught at resolver time + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a-sys] + path = "a-sys" + "#, + ) + .file("a/src/lib.rs", "") + .file("a/build.rs", "") + .file( + "a/a-sys/Cargo.toml", + r#" + [project] + name = "a-sys" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file("a/a-sys/src/lib.rs", "") + .file("a/a-sys/build.rs", "") + .build(); + + p.cargo("build").with_status(101) + .with_stderr("\ +error: failed to select a version for `a-sys`. + ... required by package `a v0.5.0 ([..])` + ... 
which is depended on by `foo v0.5.0 ([..])` +versions that meet the requirements `*` are: 0.5.0 + +the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: +package `foo v0.5.0 ([..])` + +failed to select a version for `a-sys` which could resolve this conflict +").run(); +} + +#[cargo_test] +fn overrides_and_links() { + let target = rustc_host(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"), + "bar"); + assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"), + "baz"); + } + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-flags = "-L foo -L bar" + foo = "bar" + bar = "baz" + "#, + target + ), + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file("a/build.rs", "not valid rust code") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[..] +[RUNNING] `rustc --crate-name foo [..] -L foo -L bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn unused_overrides() { + let target = rustc_host(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-flags = "-L foo -L bar" + foo = "bar" + bar = "baz" + "#, + target + ), + ) + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn links_passes_env_vars() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); + assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); + } + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + use std::env; + fn main() { + let lib = env::var("CARGO_MANIFEST_LINKS").unwrap(); + assert_eq!(lib, "foo"); + + println!("cargo:foo=bar"); + println!("cargo:bar=baz"); + } + "#, + ) + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn only_rerun_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .build(); + + p.cargo("build -v").run(); + p.root().move_into_the_past(); + + File::create(&p.root().join("some-new-file")).unwrap(); + p.root().move_into_the_past(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `[..]/build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn rebuild_continues_to_pass_env_vars() { + let a = project() + .at("a") + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::time::Duration; + fn main() { + println!("cargo:foo=bar"); + println!("cargo:bar=baz"); + std::thread::sleep(Duration::from_millis(500)); + } + "#, + ) + .build(); + a.root().move_into_the_past(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = '{}' + "#, + a.root().display() + ), + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); + assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); + } + "#, + ) + .build(); + + p.cargo("build -v").run(); + p.root().move_into_the_past(); + + File::create(&p.root().join("some-new-file")).unwrap(); + p.root().move_into_the_past(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn testing_and_such() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .build(); + + println!("build"); + p.cargo("build -v").run(); + p.root().move_into_the_past(); + + File::create(&p.root().join("src/lib.rs")).unwrap(); + p.root().move_into_the_past(); + + println!("test"); + p.cargo("test -vj1") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `[..]/build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]/foo-[..][EXE]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]`", + ) + .with_stdout_contains_n("running 0 tests", 2) + .run(); + + println!("doc"); + p.cargo("doc -v") + .with_stderr( + "\ +[DOCUMENTING] foo v0.5.0 ([CWD]) +[RUNNING] `rustdoc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + File::create(&p.root().join("src/main.rs")) + .unwrap() + .write_all(b"fn main() {}") + .unwrap(); + println!("run"); + p.cargo("run") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/foo[EXE]` +", + ) + .run(); +} + +#[cargo_test] +fn propagation_of_l_flags() { + let target = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#"fn main() { println!("cargo:rustc-flags=-L bar"); }"#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("b/src/lib.rs", "") + .file("b/build.rs", "bad file") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-flags = "-L foo" + "#, + target + ), + ) + .build(); + + p.cargo("build -v -j1") + .with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name a [..] 
-L bar[..]-L foo[..]` +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc --crate-name foo [..] -L bar -L foo` +", + ) + .run(); +} + +#[cargo_test] +fn propagation_of_l_flags_new() { + let target = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=bar"); + } + "#, + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("b/src/lib.rs", "") + .file("b/build.rs", "bad file") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.foo] + rustc-link-search = ["foo"] + "#, + target + ), + ) + .build(); + + p.cargo("build -v -j1") + .with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc --crate-name foo [..] -L bar -L foo` +", + ) + .run(); +} + +#[cargo_test] +fn build_deps_simple() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + [build-dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + " + #[allow(unused_extern_crates)] + extern crate a; + fn main() {} + ", + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] a v0.5.0 ([CWD]/a) +[RUNNING] `rustc --crate-name a [..]` +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc [..] build.rs [..] --extern a=[..]` +[RUNNING] `[..]/foo-[..]/build-script-build` +[RUNNING] `rustc --crate-name foo [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn build_deps_not_for_normal() { + let target = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + [build-dependencies.aaaaa] + path = "a" + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate aaaaa;", + ) + .file( + "build.rs", + " + #[allow(unused_extern_crates)] + extern crate aaaaa; + fn main() {} + ", + ) + .file("a/Cargo.toml", &basic_manifest("aaaaa", "0.5.0")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build -v --target") + .arg(&target) + .with_status(101) + .with_stderr_contains("[..]can't find crate for `aaaaa`[..]") + .with_stderr_contains( + "\ +[ERROR] Could not compile `foo`. + +Caused by: + process didn't exit successfully: [..] 
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn build_cmd_with_a_build_cmd() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.a]
+            path = "a"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            "
+            #[allow(unused_extern_crates)]
+            extern crate a;
+            fn main() {}
+        ",
+        )
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [build-dependencies.b]
+            path = "../b"
+        "#,
+        )
+        .file("a/src/lib.rs", "")
+        .file(
+            "a/build.rs",
+            "#[allow(unused_extern_crates)] extern crate b; fn main() {}",
+        )
+        .file("b/Cargo.toml", &basic_manifest("b", "0.5.0"))
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] b v0.5.0 ([CWD]/b)
+[RUNNING] `rustc --crate-name b [..]`
+[COMPILING] a v0.5.0 ([CWD]/a)
+[RUNNING] `rustc [..] a/build.rs [..] --extern b=[..]`
+[RUNNING] `[..]/a-[..]/build-script-build`
+[RUNNING] `rustc --crate-name a [..]lib.rs --color never --crate-type lib \
+    --emit=[..]link -C debuginfo=2 \
+    -C metadata=[..] \
+    --out-dir [..]target/debug/deps \
+    -L [..]target/debug/deps`
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name build_script_build build.rs --color never --crate-type bin \
+    --emit=[..]link \
+    -C debuginfo=2 -C metadata=[..] --out-dir [..] \
+    -L [..]target/debug/deps \
+    --extern a=[..]liba[..].rlib`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..]lib.rs --color never --crate-type lib \
+    --emit=[..]link -C debuginfo=2 \
+    -C metadata=[..] \
+    --out-dir [..] \
+    -L [..]target/debug/deps`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn out_dir_is_preserved() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::fs::File;
+            use std::path::Path;
+            fn main() {
+                let out = env::var("OUT_DIR").unwrap();
+                File::create(Path::new(&out).join("foo")).unwrap();
+            }
+        "#,
+        )
+        .build();
+
+    // Make the file
+    p.cargo("build -v").run();
+    p.root().move_into_the_past();
+
+    // Change the build script to assert that the file is still there
+    File::create(&p.root().join("build.rs"))
+        .unwrap()
+        .write_all(
+            br#"
+            use std::env;
+            use std::fs::File;
+            use std::path::Path;
+            fn main() {
+                let out = env::var("OUT_DIR").unwrap();
+                File::open(&Path::new(&out).join("foo")).unwrap();
+            }
+        "#,
+        )
+        .unwrap();
+    p.root().move_into_the_past();
+    p.cargo("build -v").run();
+
+    // Run a fresh build where the file should be preserved
+    p.cargo("build -v").run();
+
+    // One last time to make sure it's still there.
+    File::create(&p.root().join("foo")).unwrap();
+    p.cargo("build -v").run();
+}
+
+#[cargo_test]
+fn output_separate_lines() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rustc-flags=-L foo");
+                println!("cargo:rustc-flags=-l static=foo");
+            }
+        "#,
+        )
+        .build();
+    p.cargo("build -v")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc [..] build.rs [..]`
+[RUNNING] `[..]/foo-[..]/build-script-build`
+[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo`
+[ERROR] could not find native static library [..]
+", + ) + .run(); +} + +#[cargo_test] +fn output_separate_lines_new() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=foo"); + println!("cargo:rustc-link-lib=static=foo"); + } + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..]/foo-[..]/build-script-build` +[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` +[ERROR] could not find native static library [..] +", + ) + .run(); +} + +#[cfg(not(windows))] // FIXME(#867) +#[cargo_test] +fn code_generation() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + include!(concat!(env!("OUT_DIR"), "/hello.rs")); + + fn main() { + println!("{}", message()); + } + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::fs::File; + use std::io::prelude::*; + use std::path::PathBuf; + + fn main() { + let dst = PathBuf::from(env::var("OUT_DIR").unwrap()); + let mut f = File::create(&dst.join("hello.rs")).unwrap(); + f.write_all(b" + pub fn message() -> &'static str { + \"Hello, World!\" + } + ").unwrap(); + } + "#, + ) + .build(); + + p.cargo("run") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/foo`", + ) + .with_stdout("Hello, World!") + .run(); + + p.cargo("test").run(); +} + +#[cargo_test] +fn release_with_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() {} + "#, + ) + .build(); + + p.cargo("build -v --release").run(); +} + +#[cargo_test] +fn build_script_only() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", r#"fn main() {}"#) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + no targets specified in the manifest + either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present", + ) + .run(); +} + +#[cargo_test] +fn shared_dep_with_a_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.a] + path = "a" + + [build-dependencies.b] + path = "b" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.5.0" + authors = [] + + [dependencies.a] + path = "../a" + "#, + ) + .file("b/src/lib.rs", "") + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn transitive_dep_host() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies.b] + path = "b" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn 
main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.5.0" + authors = [] + + [lib] + name = "b" + plugin = true + + [dependencies.a] + path = "../a" + "#, + ) + .file("b/src/lib.rs", "") + .build(); + p.cargo("build").run(); +} + +#[cargo_test] +fn test_a_lib_with_a_build_command() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + include!(concat!(env!("OUT_DIR"), "/foo.rs")); + + /// ``` + /// foo::bar(); + /// ``` + pub fn bar() { + assert_eq!(foo(), 1); + } + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::io::prelude::*; + use std::fs::File; + use std::path::PathBuf; + + fn main() { + let out = PathBuf::from(env::var("OUT_DIR").unwrap()); + File::create(out.join("foo.rs")).unwrap().write_all(b" + fn foo() -> i32 { 1 } + ").unwrap(); + } + "#, + ) + .build(); + p.cargo("test").run(); +} + +#[cargo_test] +fn test_dev_dep_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .build(); + + p.cargo("test").run(); +} + +#[cargo_test] +fn build_script_with_dynamic_native_dependency() { + let build = project() + .at("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + + [lib] + name = "builder" + crate-type = ["dylib"] + "#, + ) + .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}") + .build(); + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [build-dependencies.bar] + path = "bar" + "#, + ) + .file("build.rs", "extern crate bar; fn main() { bar::bar() }") + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "bar/build.rs", + r#" + use std::env; + use std::fs; + use std::path::PathBuf; + + fn main() { + let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); + let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap()); + let file = format!("{}builder{}", + env::consts::DLL_PREFIX, + env::consts::DLL_SUFFIX); + let src = root.join(&file); + let dst = out_dir.join(&file); + fs::copy(src, dst).unwrap(); + if cfg!(windows) { + fs::copy(root.join("builder.dll.lib"), + out_dir.join("builder.dll.lib")).unwrap(); + } + println!("cargo:rustc-link-search=native={}", out_dir.display()); + } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn bar() { + #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] + #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] + extern { fn foo(); } + unsafe { foo() } + } + "#, + ) + .build(); + + build + .cargo("build -v") + .env("CARGO_LOG", "cargo::ops::cargo_rustc") + .run(); + + let root = build.root().join("target").join("debug"); + foo.cargo("build -v") + .env("BUILDER_ROOT", root) + .env("CARGO_LOG", "cargo::ops::cargo_rustc") + .run(); +} + +#[cargo_test] +fn 
profile_and_opt_level_set_correctly() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + + fn main() { + assert_eq!(env::var("OPT_LEVEL").unwrap(), "3"); + assert_eq!(env::var("PROFILE").unwrap(), "release"); + assert_eq!(env::var("DEBUG").unwrap(), "false"); + } + "#, + ) + .build(); + p.cargo("bench").run(); +} + +#[cargo_test] +fn profile_debug_0() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [profile.dev] + debug = 0 + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + + fn main() { + assert_eq!(env::var("OPT_LEVEL").unwrap(), "0"); + assert_eq!(env::var("PROFILE").unwrap(), "debug"); + assert_eq!(env::var("DEBUG").unwrap(), "false"); + } + "#, + ) + .build(); + p.cargo("build").run(); +} + +#[cargo_test] +fn build_script_with_lto() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [profile.dev] + lto = true + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .build(); + p.cargo("build").run(); +} + +#[cargo_test] +fn test_duplicate_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + build = "build.rs" + + [dependencies.bar] + path = "bar" + + [build-dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::do_nothing() } + "#, + ) + .file( + "build.rs", + r#" + extern crate bar; + fn main() { bar::do_nothing() } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn do_nothing() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn cfg_feedback() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/main.rs", "#[cfg(foo)] fn main() {}") + .file( + "build.rs", + r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, + ) + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn cfg_override() { + let target = rustc_host(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file("src/main.rs", "#[cfg(foo)] fn main() {}") + .file("build.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.a] + rustc-cfg = ["foo"] + "#, + target + ), + ) + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn cfg_test() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, + ) + .file( + "src/lib.rs", + r#" + /// + /// ``` + /// extern crate foo; + /// + /// fn main() { + /// foo::foo() + /// } + /// ``` + /// + #[cfg(foo)] + pub fn foo() {} + + #[cfg(foo)] + #[test] + fn test_foo() { + foo() + } + "#, + ) + .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}") + .build(); + p.cargo("test -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] [..] build.rs [..] +[RUNNING] `[..]/build-script-build` +[RUNNING] [..] --cfg foo[..] +[RUNNING] [..] --cfg foo[..] +[RUNNING] [..] --cfg foo[..] 
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]/foo-[..][EXE]` +[RUNNING] `[..]/test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --cfg foo[..]", + ) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") + .with_stdout_contains_n("test [..] ... ok", 3) + .run(); +} + +#[cargo_test] +fn cfg_doc() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "build.rs", + r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, + ) + .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "bar/build.rs", + r#"fn main() { println!("cargo:rustc-cfg=bar"); }"#, + ) + .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}") + .build(); + p.cargo("doc").run(); + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); + assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); +} + +#[cargo_test] +fn cfg_override_test() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "a" + "#, + ) + .file("build.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}.a] + rustc-cfg = ["foo"] + "#, + rustc_host() + ), + ) + .file( + "src/lib.rs", + r#" + /// + /// ``` + /// extern crate foo; + /// + /// fn main() { + /// foo::foo() + /// } + /// ``` + /// + #[cfg(foo)] + pub fn foo() {} + + #[cfg(foo)] + #[test] + fn test_foo() { + foo() + } + "#, + ) + .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}") + .build(); + p.cargo("test -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `[..]` +[RUNNING] `[..]` +[RUNNING] `[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]/foo-[..][EXE]` +[RUNNING] `[..]/test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --cfg foo[..]", + ) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test_bar ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 3) + .run(); +} + +#[cargo_test] +fn cfg_override_doc() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "a" + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{target}.a] + rustc-cfg = ["foo"] + [target.{target}.b] + rustc-cfg = ["bar"] + "#, + target = rustc_host() + ), + ) + .file("build.rs", "") + .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + links = "b" + "#, + ) + .file("bar/build.rs", "") + .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}") + .build(); + p.cargo("doc").run(); + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); + assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); +} + +#[cargo_test] +fn env_build() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + const FOO: &'static str = env!("FOO"); + fn main() { + println!("{}", FOO); + } + "#, + ) + .file( + "build.rs", + r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, + ) + .build(); + p.cargo("build -v").run(); + p.cargo("run -v").with_stdout("foo\n").run(); +} + +#[cargo_test] +fn env_test() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, + ) + .file( + "src/lib.rs", + r#"pub const FOO: &'static str = env!("FOO"); "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo; + + #[test] + fn test_foo() { + assert_eq!("foo", foo::FOO); + } + "#, + ) + .build(); + p.cargo("test -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] [..] build.rs [..] +[RUNNING] `[..]/build-script-build` +[RUNNING] [..] --crate-name foo[..] +[RUNNING] [..] --crate-name foo[..] +[RUNNING] [..] --crate-name test[..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]/foo-[..][EXE]` +[RUNNING] `[..]/test-[..][EXE]` +[DOCTEST] foo +[RUNNING] [..] --crate-name foo[..]", + ) + .with_stdout_contains_n("running 0 tests", 2) + .with_stdout_contains("test test_foo ... 
ok") + .run(); +} + +#[cargo_test] +fn env_doc() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "src/main.rs", + r#" + const FOO: &'static str = env!("FOO"); + fn main() {} + "#, + ) + .file( + "build.rs", + r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, + ) + .build(); + p.cargo("doc -v").run(); +} + +#[cargo_test] +fn flags_go_into_tests() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "../a" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=test"); + } + "#, + ) + .build(); + + p.cargo("test -v --test=foo") + .with_stderr( + "\ +[COMPILING] a v0.5.0 ([..] +[RUNNING] `rustc [..] a/build.rs [..]` +[RUNNING] `[..]/build-script-build` +[RUNNING] `rustc [..] a/src/lib.rs [..] -L test[..]` +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]` +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] src/lib.rs [..] -L test[..]` +[RUNNING] `rustc [..] tests/foo.rs [..] -L test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]/foo-[..][EXE]`", + ) + .with_stdout_contains("running 0 tests") + .run(); + + p.cargo("test -v -pb --lib") + .with_stderr( + "\ +[FRESH] a v0.5.0 ([..] +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]/b-[..][EXE]`", + ) + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn diamond_passes_args_only_once() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + [dependencies] + b = { path = "../b" } + c = { path = "../c" } + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + c = { path = "../c" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "c/Cargo.toml", + r#" + [project] + name = "c" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "c/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=test"); + } + "#, + ) + .file("c/src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] c v0.5.0 ([..] +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..]` +[COMPILING] b v0.5.0 ([..] +[RUNNING] `rustc [..]` +[COMPILING] a v0.5.0 ([..] +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `[..]rlib -L native=test` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn adding_an_override_invalidates() { + let target = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file(".cargo/config", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=foo"); + } + "#, + ) + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..] -L native=foo` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + File::create(p.root().join(".cargo/config")) + .unwrap() + .write_all( + format!( + " + [target.{}.foo] + rustc-link-search = [\"native=bar\"] + ", + target + ) + .as_bytes(), + ) + .unwrap(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn changing_an_override_invalidates() { + let target = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + " + [target.{}.foo] + rustc-link-search = [\"native=foo\"] + ", + target + ), + ) + .file("build.rs", "") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=foo` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + File::create(p.root().join(".cargo/config")) + .unwrap() + .write_all( + format!( + " + [target.{}.foo] + rustc-link-search = [\"native=bar\"] + ", + target + ) + .as_bytes(), + ) + .unwrap(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..] -L native=bar` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn fresh_builds_possible_with_link_libs() { + // The bug is non-deterministic. Sometimes you can get a fresh build + let target = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "nativefoo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + " + [target.{}.nativefoo] + rustc-link-lib = [\"a\"] + rustc-link-search = [\"./b\"] + rustc-flags = \"-l z -L ./\" + ", + target + ), + ) + .file("build.rs", "") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..] +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build -v") + .with_stderr( + "\ +[FRESH] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn fresh_builds_possible_with_multiple_metadata_overrides() { + // The bug is non-deterministic. 
Sometimes you can get a fresh build
+    let target = rustc_host();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            ".cargo/config",
+            &format!(
+                "
+                [target.{}.foo]
+                a = \"\"
+                b = \"\"
+                c = \"\"
+                d = \"\"
+                e = \"\"
+            ",
+                target
+            ),
+        )
+        .file("build.rs", "")
+        .build();
+
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..]
+[RUNNING] `rustc [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+
+    p.cargo("build -v")
+        .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint=info")
+        .with_stderr(
+            "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn rebuild_only_on_explicit_paths() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() {
+                println!("cargo:rerun-if-changed=foo");
+                println!("cargo:rerun-if-changed=bar");
+            }
+        "#,
+        )
+        .build();
+
+    p.cargo("build -v").run();
+
+    // the tracked files don't exist yet, so the build script should always rerun
+    println!("run without");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+
+    sleep_ms(1000);
+    File::create(p.root().join("foo")).unwrap();
+    File::create(p.root().join("bar")).unwrap();
+    sleep_ms(1000); // make sure the to-be-created outfile has a timestamp distinct from the infiles
+
+    // now they exist, so run once, catch the mtime, then it shouldn't run again
+    println!("run with");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+
+    println!("run with2");
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+
+    sleep_ms(1000);
+
+    // random other files do not affect freshness
+    println!("run baz");
+    File::create(p.root().join("baz")).unwrap();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[FRESH] foo v0.5.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+
+    // but changing dependent files does
+    println!("run foo change");
+    File::create(p.root().join("foo")).unwrap();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+
+    // ...as does deleting a file
+    println!("run foo delete");
+    fs::remove_file(p.root().join("bar")).unwrap();
+    p.cargo("build -v")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.5.0 ([..])
+[RUNNING] `[..]/build-script-build`
+[RUNNING] `rustc [..] src/lib.rs [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+", + ) + .run(); +} + +#[cargo_test] +fn doctest_receives_build_link_args() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=bar"); + } + "#, + ) + .build(); + + p.cargo("test -v") + .with_stderr_contains( + "[RUNNING] `rustdoc --test [..] --crate-name foo [..]-L native=bar[..]`", + ) + .run(); +} + +#[cargo_test] +fn please_respect_the_dag() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies] + a = { path = 'a' } + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=foo"); + } + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "bar" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=native=bar"); + } + "#, + ) + .build(); + + p.cargo("build -v") + .with_stderr_contains("[RUNNING] `rustc [..] -L native=foo -L native=bar[..]`") + .run(); +} + +#[cargo_test] +fn non_utf8_output() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + use std::io::prelude::*; + + fn main() { + let mut out = std::io::stdout(); + // print something that's not utf8 + out.write_all(b"\xff\xff\n").unwrap(); + + // now print some cargo metadata that's utf8 + println!("cargo:rustc-cfg=foo"); + + // now print more non-utf8 + out.write_all(b"\xff\xff\n").unwrap(); + } + "#, + ) + .file("src/main.rs", "#[cfg(foo)] fn main() {}") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn custom_target_dir() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + target-dir = 'test' + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("a/build.rs", "fn main() {}") + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn panic_abort_with_build_scripts() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [profile.release] + panic = 'abort' + + [dependencies] + a = { path = "a" } + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate a;", + ) + .file("build.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + build = "build.rs" + + [build-dependencies] + b = { path = "../b" } + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/build.rs", + "#[allow(unused_extern_crates)] extern crate b; fn main() {}", + ) + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("build -v --release").run(); + + p.root().join("target").rm_rf(); + + p.cargo("test 
--release -v") + .with_stderr_does_not_contain("[..]panic[..]") + .run(); +} + +#[cargo_test] +fn warnings_emitted() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#, + ) + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +warning: foo +warning: bar +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn warnings_hidden_for_upstream() { + Package::new("bar", "0.1.0") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#, + ) + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[RUNNING] `rustc [..]` +[RUNNING] `[..]` +[RUNNING] `rustc [..]` +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn warnings_printed_on_vv() { + Package::new("bar", "0.1.0") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:warning=foo"); + println!("cargo:warning=bar"); + } + "#, + ) + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -vv") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[RUNNING] `[..] rustc [..]` +[RUNNING] `[..]` +warning: foo +warning: bar +[RUNNING] `[..] rustc [..]` +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `[..] rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn output_shows_on_vv() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::io::prelude::*; + + fn main() { + std::io::stderr().write_all(b"stderr\n").unwrap(); + std::io::stdout().write_all(b"stdout\n").unwrap(); + } + "#, + ) + .build(); + + p.cargo("build -vv") + .with_stdout("[foo 0.5.0] stdout") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `[..] rustc [..]` +[RUNNING] `[..]` +[foo 0.5.0] stderr +[RUNNING] `[..] rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn links_with_dots() { + let target = rustc_host(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + links = "a.b" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-link-search=bar") + } + "#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{}.'a.b'] + rustc-link-search = ["foo"] + "#, + target + ), + ) + .build(); + + p.cargo("build -v") + .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]`") + .run(); +} + +#[cargo_test] +fn rustc_and_rustdoc_set_correctly() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + + fn main() { + assert_eq!(env::var("RUSTC").unwrap(), "rustc"); + assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc"); + } + "#, + ) + .build(); + p.cargo("bench").run(); +} + +#[cargo_test] +fn cfg_env_vars_available() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + + fn main() { + let fam = env::var("CARGO_CFG_TARGET_FAMILY").unwrap(); + if cfg!(unix) { + assert_eq!(fam, "unix"); + } else { + assert_eq!(fam, "windows"); + } + } + "#, + ) + .build(); + p.cargo("bench").run(); +} + +#[cargo_test] +fn switch_features_rerun() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [features] + foo = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + println!(include_str!(concat!(env!("OUT_DIR"), "/output"))); + } + "#, + ) + .file( + "build.rs", + r#" + use std::env; + use std::fs::File; + use std::io::Write; + use std::path::Path; + + fn main() { + let out_dir = env::var_os("OUT_DIR").unwrap(); + let out_dir = Path::new(&out_dir).join("output"); + let mut f = File::create(&out_dir).unwrap(); + + if env::var_os("CARGO_FEATURE_FOO").is_some() { + f.write_all(b"foo").unwrap(); + } else { + f.write_all(b"bar").unwrap(); + } + } + "#, + ) + .build(); + + p.cargo("build -v --features=foo").run(); + p.rename_run("foo", "with_foo").with_stdout("foo\n").run(); + p.cargo("build -v").run(); + p.rename_run("foo", "without_foo") + .with_stdout("bar\n") + .run(); + p.cargo("build -v --features=foo").run(); + p.rename_run("foo", "with_foo2").with_stdout("foo\n").run(); +} + +#[cargo_test] +fn assume_build_script_when_build_rs_present() { + let p = project() + .file( + "src/main.rs", + r#" + fn main() { + if ! 
cfg!(foo) { + panic!("the build script was not run"); + } + } + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#, + ) + .build(); + + p.cargo("run -v").run(); +} + +#[cargo_test] +fn if_build_set_to_false_dont_treat_build_rs_as_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = false + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + if cfg!(foo) { + panic!("the build script was run"); + } + } + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=foo"); + } + "#, + ) + .build(); + + p.cargo("run -v").run(); +} + +#[cargo_test] +fn deterministic_rustc_dependency_flags() { + // This bug is non-deterministic hence the large number of dependencies + // in the hopes it will have a much higher chance of triggering it. + + Package::new("dep1", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep1" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test1"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("dep2", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep2" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test2"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("dep3", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep3" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test3"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("dep4", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "dep4" + version = "0.1.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rustc-flags=-L native=test4"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "*" + dep2 = "*" + dep3 = "*" + dep4 = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name foo [..] -L native=test1 -L native=test2 \ +-L native=test3 -L native=test4` +", + ) + .run(); +} + +#[cargo_test] +fn links_duplicates_with_cycle() { + // this tests that the links_duplicates are caught at resolver time + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + + [dependencies.a] + path = "a" + + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + links = "a" + build = "build.rs" + "#, + ) + .file("a/src/lib.rs", "") + .file("a/build.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + + [dependencies] + foo = { path = ".." } + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("build").with_status(101) + .with_stderr("\ +error: failed to select a version for `a`. + ... 
required by package `foo v0.5.0 ([..])` +versions that meet the requirements `*` are: 0.5.0 + +the package `a` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: +package `foo v0.5.0 ([..])` + +failed to select a version for `a` which could resolve this conflict +").run(); +} + +#[cargo_test] +fn rename_with_link_search_path() { + _rename_with_link_search_path(false); +} + +#[cargo_test] +fn rename_with_link_search_path_cross() { + if cross_compile::disabled() { + return; + } + + _rename_with_link_search_path(true); +} + +fn _rename_with_link_search_path(cross: bool) { + let target_arg = if cross { + format!(" --target={}", cross_compile::alternate()) + } else { + "".to_string() + }; + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [lib] + crate-type = ["cdylib"] + "#, + ) + .file( + "src/lib.rs", + "#[no_mangle] pub extern fn cargo_test_foo() {}", + ); + let p = p.build(); + + p.cargo(&format!("build{}", target_arg)).run(); + + let p2 = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file( + "build.rs", + r#" + use std::env; + use std::fs; + use std::path::PathBuf; + + fn main() { + // Move the `libfoo.so` from the root of our project into the + // build directory. This way Cargo should automatically manage + // `LD_LIBRARY_PATH` and such. + let root = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); + let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + let src = root.join(&file); + + let dst_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + let dst = dst_dir.join(&file); + + fs::copy(&src, &dst).unwrap(); + // handle windows, like below + drop(fs::copy(root.join("foo.dll.lib"), dst_dir.join("foo.dll.lib"))); + + println!("cargo:rerun-if-changed=build.rs"); + if cfg!(target_env = "msvc") { + println!("cargo:rustc-link-lib=foo.dll"); + } else { + println!("cargo:rustc-link-lib=foo"); + } + println!("cargo:rustc-link-search=all={}", + dst.parent().unwrap().display()); + } + "#, + ) + .file( + "src/main.rs", + r#" + extern { + #[link_name = "cargo_test_foo"] + fn foo(); + } + + fn main() { + unsafe { foo(); } + } + "#, + ); + let p2 = p2.build(); + + // Move the output `libfoo.so` into the directory of `p2`, and then delete + // the `p` project. On macOS, the `libfoo.dylib` artifact references the + // original path in `p` so we want to make sure that it can't find it (hence + // the deletion). + let root = if cross { + p.root() + .join("target") + .join(cross_compile::alternate()) + .join("debug") + .join("deps") + } else { + p.root().join("target").join("debug").join("deps") + }; + let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); + let src = root.join(&file); + + let dst = p2.root().join(&file); + + fs::copy(&src, &dst).unwrap(); + // copy the import library for windows, if it exists + drop(fs::copy( + &root.join("foo.dll.lib"), + p2.root().join("foo.dll.lib"), + )); + remove_dir_all(p.root()).unwrap(); + + // Everything should work the first time + p2.cargo(&format!("run{}", target_arg)).run(); + + // Now rename the root directory and rerun `cargo run`. Not only should we + // not build anything but we also shouldn't crash. + let mut new = p2.root(); + new.pop(); + new.push("bar2"); + + // For whatever reason on Windows right after we execute a binary it's very + // unlikely that we're able to successfully delete or rename that binary. 
+ // It's not really clear why this is the case or if it's a bug in Cargo + // holding a handle open too long. In an effort to reduce the flakiness of + // this test though we throw this in a loop + // + // For some more information see #5481 and rust-lang/rust#48775 + let mut i = 0; + loop { + let error = match fs::rename(p2.root(), &new) { + Ok(()) => break, + Err(e) => e, + }; + i += 1; + if !cfg!(windows) || error.kind() != io::ErrorKind::PermissionDenied || i > 10 { + panic!("failed to rename: {}", error); + } + println!("assuming {} is spurious, waiting to try again", error); + thread::sleep(slow_cpu_multiplier(100)); + } + + p2.cargo(&format!("run{}", target_arg)) + .cwd(&new) + .with_stderr( + "\ +[FINISHED] [..] +[RUNNING] [..] +", + ) + .run(); +} + +#[cargo_test] +fn optional_build_script_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = { path = "bar", optional = true } + + [build-dependencies] + bar = { path = "bar", optional = true } + "#, + ) + .file( + "build.rs", + r#" + #[cfg(feature = "bar")] + extern crate bar; + + fn main() { + #[cfg(feature = "bar")] { + println!("cargo:rustc-env=FOO={}", bar::bar()); + return + } + println!("cargo:rustc-env=FOO=0"); + } + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(feature = "bar")] + extern crate bar; + + fn main() { + println!("{}", env!("FOO")); + } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }"); + let p = p.build(); + + p.cargo("run").with_stdout("0\n").run(); + p.cargo("run --features bar").with_stdout("1\n").run(); +} + +#[cargo_test] +fn optional_build_dep_and_required_normal_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "./bar", optional = true } + + [build-dependencies] + bar = { path = "./bar" } + "#, + ) + .file("build.rs", "extern crate bar; fn main() { bar::bar(); }") + .file( + "src/main.rs", + r#" + #[cfg(feature = "bar")] + extern crate bar; + + fn main() { + #[cfg(feature = "bar")] { + println!("{}", bar::bar()); + } + #[cfg(not(feature = "bar"))] { + println!("0"); + } + } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }"); + let p = p.build(); + + p.cargo("run") + .with_stdout("0") + .with_stderr( + "\ +[COMPILING] bar v0.5.0 ([..]) +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]foo[EXE]`", + ) + .run(); + + p.cargo("run --all-features") + .with_stdout("1") + .with_stderr( + "\ +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]foo[EXE]`", + ) + .run(); +} + +#[cargo_test] +fn using_rerun_if_changed_does_not_rebuild() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#, + ) + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-changed=build.rs"); + } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn links_interrupted_can_restart() { + // Test for a `links` dependent build script getting canceled and then + // restarted. Steps: + // 1. Build to establish fingerprints. + // 2. 
Change something (an env var in this case) that triggers the + // dependent build script to run again. Kill the top-level build script + // while it is running (such as hitting Ctrl-C). + // 3. Run the build again, it should re-run the build script. + let bar = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.5.0" + authors = [] + links = "foo" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-env-changed=SOMEVAR"); + } + "#, + ) + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + + [dependencies.bar] + path = '{}' + "#, + bar.root().display() + ), + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + fn main() { + println!("cargo:rebuild-if-changed=build.rs"); + if std::path::Path::new("abort").exists() { + panic!("Crash!"); + } + } + "#, + ) + .build(); + + p.cargo("build").run(); + // Simulate the user hitting Ctrl-C during a build. + p.change_file("abort", ""); + // Set SOMEVAR to trigger a rebuild. + p.cargo("build") + .env("SOMEVAR", "1") + .with_stderr_contains("[..]Crash![..]") + .with_status(101) + .run(); + fs::remove_file(p.root().join("abort")).unwrap(); + // Try again without aborting the script. + // ***This is currently broken, the script does not re-run. + p.cargo("build -v") + .env("SOMEVAR", "1") + .with_stderr_contains("[RUNNING] [..]/foo-[..]/build-script-build[..]") + .run(); +} + +#[cargo_test] +#[cfg(unix)] +fn build_script_scan_eacces() { + // build.rs causes a scan of the whole project, which can be a problem if + // a directory is not accessible. + use std::os::unix::fs::PermissionsExt; + let p = project() + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file("secrets/stuff", "") + .build(); + let path = p.root().join("secrets"); + fs::set_permissions(&path, fs::Permissions::from_mode(0)).unwrap(); + // "Caused by" is a string from libc such as the following: + // Permission denied (os error 13) + p.cargo("build") + .with_stderr( + "\ +[ERROR] cannot read \"[..]/foo/secrets\" + +Caused by: + [..] +", + ) + .with_status(101) + .run(); + fs::set_permissions(&path, fs::Permissions::from_mode(0o755)).unwrap(); +} diff --git a/tests/testsuite/build_script_env.rs b/tests/testsuite/build_script_env.rs new file mode 100644 index 00000000000..5b2a5795b66 --- /dev/null +++ b/tests/testsuite/build_script_env.rs @@ -0,0 +1,108 @@ +use std::fs::File; + +use crate::support::project; +use crate::support::sleep_ms; + +#[cargo_test] +fn rerun_if_env_changes() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-env-changed=FOO"); + } + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); + p.cargo("build") + .env("FOO", "bar") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); + p.cargo("build") + .env("FOO", "baz") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); + p.cargo("build") + .env("FOO", "baz") + .with_stderr("[FINISHED] [..]") + .run(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn rerun_if_env_or_file_changes() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-env-changed=FOO"); + println!("cargo:rerun-if-changed=foo"); + } + "#, + ) + .file("foo", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); + p.cargo("build") + .env("FOO", "bar") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); + p.cargo("build") + .env("FOO", "bar") + .with_stderr("[FINISHED] [..]") + .run(); + sleep_ms(1000); + File::create(p.root().join("foo")).unwrap(); + p.cargo("build") + .env("FOO", "bar") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); +} diff --git a/tests/testsuite/cache_messages.rs b/tests/testsuite/cache_messages.rs new file mode 100644 index 00000000000..b283a285f4e --- /dev/null +++ b/tests/testsuite/cache_messages.rs @@ -0,0 +1,348 @@ +use crate::support::{clippy_is_available, is_nightly, process, project, registry::Package}; +use std::path::Path; + +fn as_str(bytes: &[u8]) -> &str { + std::str::from_utf8(bytes).expect("valid utf-8") +} + +#[cargo_test] +fn simple() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + // A simple example that generates two warnings (unused functions). + let p = project() + .file( + "src/lib.rs", + " + fn a() {} + fn b() {} + ", + ) + .build(); + + let agnostic_path = Path::new("src").join("lib.rs"); + let agnostic_path_s = agnostic_path.to_str().unwrap(); + + // Capture what rustc actually emits. This is done to avoid relying on the + // exact message formatting in rustc. + let rustc_output = process("rustc") + .cwd(p.root()) + .args(&["--crate-type=lib", agnostic_path_s]) + .exec_with_output() + .expect("rustc to run"); + + assert!(rustc_output.stdout.is_empty()); + assert!(rustc_output.status.success()); + + // -q so the output is the same as rustc (no "Compiling" or "Finished"). + let cargo_output1 = p + .cargo("check -Zcache-messages -q --color=never") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output1.stderr)); + assert!(cargo_output1.stdout.is_empty()); + // Check that the cached version is exactly the same. + let cargo_output2 = p + .cargo("check -Zcache-messages -q") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(as_str(&rustc_output.stderr), as_str(&cargo_output2.stderr)); + assert!(cargo_output2.stdout.is_empty()); +} + +#[cargo_test] +fn color() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + // Check enabling/disabling color. + let p = project().file("src/lib.rs", "fn a() {}").build(); + + let agnostic_path = Path::new("src").join("lib.rs"); + let agnostic_path_s = agnostic_path.to_str().unwrap(); + // Capture the original color output. + let rustc_output = process("rustc") + .cwd(p.root()) + .args(&["--crate-type=lib", agnostic_path_s, "--color=always"]) + .exec_with_output() + .expect("rustc to run"); + assert!(rustc_output.status.success()); + let rustc_color = as_str(&rustc_output.stderr); + assert!(rustc_color.contains("\x1b[")); + + // Capture the original non-color output. 
+ let rustc_output = process("rustc") + .cwd(p.root()) + .args(&["--crate-type=lib", agnostic_path_s]) + .exec_with_output() + .expect("rustc to run"); + let rustc_nocolor = as_str(&rustc_output.stderr); + assert!(!rustc_nocolor.contains("\x1b[")); + + // First pass, non-cached, with color, should be the same. + let cargo_output1 = p + .cargo("check -Zcache-messages -q --color=always") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(rustc_color, as_str(&cargo_output1.stderr)); + + // Replay cached, with color. + let cargo_output2 = p + .cargo("check -Zcache-messages -q --color=always") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(rustc_color, as_str(&cargo_output2.stderr)); + + // Replay cached, no color. + let cargo_output_nocolor = p + .cargo("check -Zcache-messages -q --color=never") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(rustc_nocolor, as_str(&cargo_output_nocolor.stderr)); +} + +#[cargo_test] +fn cached_as_json() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + // Check that cached JSON output is the same. + let p = project().file("src/lib.rs", "fn a() {}").build(); + + // Grab the non-cached output, feature disabled. + // NOTE: When stabilizing, this will need to be redone. + let cargo_output = p + .cargo("check --message-format=json") + .exec_with_output() + .expect("cargo to run"); + assert!(cargo_output.status.success()); + let orig_cargo_out = as_str(&cargo_output.stdout); + assert!(orig_cargo_out.contains("compiler-message")); + p.cargo("clean").run(); + + // Check JSON output, not fresh. + let cargo_output1 = p + .cargo("check -Zcache-messages --message-format=json") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + assert_eq!(as_str(&cargo_output1.stdout), orig_cargo_out); + + // Check JSON output, fresh. + let cargo_output2 = p + .cargo("check -Zcache-messages --message-format=json") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo to run"); + // The only difference should be this field. + let fix_fresh = as_str(&cargo_output2.stdout).replace("\"fresh\":true", "\"fresh\":false"); + assert_eq!(fix_fresh, orig_cargo_out); +} + +#[cargo_test] +fn clears_cache_after_fix() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + // Make sure the cache is invalidated when there is no output. + let p = project().file("src/lib.rs", "fn asdf() {}").build(); + // Fill the cache. + p.cargo("check -Zcache-messages") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]asdf[..]") + .run(); + let cpath = p + .glob("target/debug/.fingerprint/foo-*/output") + .next() + .unwrap() + .unwrap(); + assert!(std::fs::read_to_string(cpath).unwrap().contains("asdf")); + + // Fix it. + p.change_file("src/lib.rs", ""); + + p.cargo("check -Zcache-messages") + .masquerade_as_nightly_cargo() + .with_stdout("") + .with_stderr( + "\ +[CHECKING] foo [..] +[FINISHED] [..] +", + ) + .run(); + assert_eq!(p.glob("target/debug/.fingerprint/foo-*/output").count(), 0); + + // And again, check the cache is correct. + p.cargo("check -Zcache-messages") + .masquerade_as_nightly_cargo() + .with_stdout("") + .with_stderr( + "\ +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn rustdoc() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + // Create a warning in rustdoc. 
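+    // (The replay that the second `doc` invocation below relies on comes
+    // from the `output` file in the unit's fingerprint directory; a sketch
+    // of inspecting it, using the same glob as `clears_cache_after_fix`:
+    //
+    //     let path = p.glob("target/debug/.fingerprint/foo-*/output")
+    //         .next().unwrap().unwrap();
+    //     let cached = std::fs::read_to_string(path).unwrap();
+    // )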
+ let p = project() + .file( + "src/lib.rs", + " + #![warn(private_doc_tests)] + /// asdf + /// ``` + /// let x = 1; + /// ``` + fn f() {} + ", + ) + .build(); + + // At this time, rustdoc does not support --json-rendered=termcolor. So it + // will always be uncolored with -Zcache-messages. + let rustdoc_output = p + .cargo("doc -Zcache-messages -q") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("rustdoc to run"); + assert!(rustdoc_output.status.success()); + let rustdoc_stderr = as_str(&rustdoc_output.stderr); + assert!(rustdoc_stderr.contains("private")); + // Invert this when --json-rendered is added. + assert!(!rustdoc_stderr.contains("\x1b[")); + assert_eq!(p.glob("target/debug/.fingerprint/foo-*/output").count(), 1); + + // Check the cached output. + let rustdoc_output = p + .cargo("doc -Zcache-messages -q") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("rustdoc to run"); + assert_eq!(as_str(&rustdoc_output.stderr), rustdoc_stderr); +} + +#[cargo_test] +fn fix() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + // Make sure `fix` is not broken by caching. + let p = project().file("src/lib.rs", "pub fn try() {}").build(); + + p.cargo("fix --edition --allow-no-vcs -Zcache-messages") + .masquerade_as_nightly_cargo() + .run(); + + assert_eq!(p.read_file("src/lib.rs"), "pub fn r#try() {}"); +} + +#[cargo_test] +fn clippy() { + if !is_nightly() { + // --json-rendered is unstable + eprintln!("skipping test: requires nightly"); + return; + } + + if !clippy_is_available() { + return; + } + + // Caching clippy output. + // This is just a random clippy lint (assertions_on_constants) that + // hopefully won't change much in the future. + let p = project() + .file("src/lib.rs", "pub fn f() { assert!(true); }") + .build(); + + p.cargo("clippy-preview -Zunstable-options -Zcache-messages") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]assert!(true)[..]") + .run(); + + // Again, reading from the cache. + p.cargo("clippy-preview -Zunstable-options -Zcache-messages") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]assert!(true)[..]") + .run(); + + // FIXME: Unfortunately clippy is sharing the same hash with check. This + // causes the cache to be reused when it shouldn't. + p.cargo("check -Zcache-messages") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]assert!(true)[..]") // This should not be here. + .run(); +} + +#[cargo_test] +fn very_verbose() { + if !is_nightly() { + // --json-rendered is unstable + return; + } + // Handle cap-lints in dependencies. 
+ Package::new("bar", "1.0.0") + .file("src/lib.rs", "fn not_used() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = "1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("check -Zcache-messages -vv") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]not_used[..]") + .run(); + + p.cargo("check -Zcache-messages") + .masquerade_as_nightly_cargo() + .with_stderr("[FINISHED] [..]") + .run(); + + p.cargo("check -Zcache-messages -vv") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]not_used[..]") + .run(); +} + +#[cargo_test] +fn short_incompatible() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("check -Zcache-messages --message-format=short") + .masquerade_as_nightly_cargo() + .with_stderr( + "[ERROR] currently `--message-format short` is incompatible with cached output", + ) + .with_status(101) + .run(); +} diff --git a/tests/testsuite/cargo_alias_config.rs b/tests/testsuite/cargo_alias_config.rs new file mode 100644 index 00000000000..3d0e14c127c --- /dev/null +++ b/tests/testsuite/cargo_alias_config.rs @@ -0,0 +1,176 @@ +use crate::support::{basic_bin_manifest, project}; + +#[cargo_test] +fn alias_incorrect_config_type() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = 5 + "#, + ) + .build(); + + p.cargo("b-cargo-test -v") + .with_status(101) + .with_stderr_contains( + "\ +[ERROR] invalid configuration for key `alias.b-cargo-test` +expected a list, but found a integer for [..]", + ) + .run(); +} + +#[cargo_test] +fn alias_config() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = "build" + "#, + ) + .build(); + + p.cargo("b-cargo-test -v") + .with_stderr_contains( + "\ +[COMPILING] foo v0.5.0 [..] +[RUNNING] `rustc --crate-name foo [..]", + ) + .run(); +} + +#[cargo_test] +fn recursive_alias() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r"fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = "build" + a-cargo-test = ["b-cargo-test", "-v"] + "#, + ) + .build(); + + p.cargo("a-cargo-test") + .with_stderr_contains( + "\ +[COMPILING] foo v0.5.0 [..] 
+[RUNNING] `rustc --crate-name foo [..]", + ) + .run(); +} + +#[cargo_test] +fn alias_list_test() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = ["build", "--release"] + "#, + ) + .build(); + + p.cargo("b-cargo-test -v") + .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") + .with_stderr_contains("[RUNNING] `rustc --crate-name [..]") + .run(); +} + +#[cargo_test] +fn alias_with_flags_config() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b-cargo-test = "build --release" + "#, + ) + .build(); + + p.cargo("b-cargo-test -v") + .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") + .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]") + .run(); +} + +#[cargo_test] +fn alias_cannot_shadow_builtin_command() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + build = "fetch" + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn alias_override_builtin_alias() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [alias] + b = "run" + "#, + ) + .build(); + + p.cargo("b") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/foo[EXE]` +", + ) + .run(); +} + +#[cargo_test] +fn builtin_alias_takes_options() { + // #6381 + let p = project() + .file("src/lib.rs", "") + .file( + "examples/ex1.rs", + r#"fn main() { println!("{}", std::env::args().skip(1).next().unwrap()) }"#, + ) + .build(); + + p.cargo("r --example ex1 -- asdf").with_stdout("asdf").run(); +} diff --git a/tests/testsuite/cargo_command.rs b/tests/testsuite/cargo_command.rs new file mode 100644 index 00000000000..dbc53bf0631 --- /dev/null +++ b/tests/testsuite/cargo_command.rs @@ -0,0 +1,338 @@ +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; +use std::str; + +use crate::support::cargo_process; +use crate::support::paths::{self, CargoPathExt}; +use crate::support::registry::Package; +use crate::support::{basic_bin_manifest, basic_manifest, cargo_exe, project, Project}; +use cargo; + +#[cfg_attr(windows, allow(dead_code))] +enum FakeKind<'a> { + Executable, + Symlink { target: &'a Path }, +} + +/// Adds an empty file with executable flags (and platform-dependent suffix). +// +// TODO: move this to `Project` if other cases using this emerge. 
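+//
+// A typical call (as used by `list_command_looks_at_path` below) drops an
+// executable stub named like an external subcommand into a directory that
+// the test then adds to `PATH`:
+//
+//     let proj = fake_file(proj, Path::new("path-test"), "cargo-1",
+//                          &FakeKind::Executable);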
+fn fake_file(proj: Project, dir: &Path, name: &str, kind: &FakeKind<'_>) -> Project {
+    let path = proj
+        .root()
+        .join(dir)
+        .join(&format!("{}{}", name, env::consts::EXE_SUFFIX));
+    path.parent().unwrap().mkdir_p();
+    match *kind {
+        FakeKind::Executable => {
+            File::create(&path).unwrap();
+            make_executable(&path);
+        }
+        FakeKind::Symlink { target } => {
+            make_symlink(&path, target);
+        }
+    }
+    return proj;
+
+    #[cfg(unix)]
+    fn make_executable(p: &Path) {
+        use std::os::unix::prelude::*;
+
+        let mut perms = fs::metadata(p).unwrap().permissions();
+        let mode = perms.mode();
+        perms.set_mode(mode | 0o111);
+        fs::set_permissions(p, perms).unwrap();
+    }
+    #[cfg(windows)]
+    fn make_executable(_: &Path) {}
+    #[cfg(unix)]
+    fn make_symlink(p: &Path, t: &Path) {
+        ::std::os::unix::fs::symlink(t, p).expect("Failed to create symlink");
+    }
+    #[cfg(windows)]
+    fn make_symlink(_: &Path, _: &Path) {
+        panic!("Not supported")
+    }
+}
+
+fn path() -> Vec<PathBuf> {
+    env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect()
+}
+
+#[cargo_test]
+fn list_commands_with_descriptions() {
+    let p = project().build();
+    p.cargo("--list")
+        .with_stdout_contains(
+            " build Compile a local package and all of its dependencies",
+        )
+        // Assert that `read-manifest` prints the right one-line description followed by another
+        // command, indented.
+        .with_stdout_contains(
+            " read-manifest Print a JSON representation of a Cargo.toml manifest.",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn list_command_looks_at_path() {
+    let proj = project().build();
+    let proj = fake_file(
+        proj,
+        Path::new("path-test"),
+        "cargo-1",
+        &FakeKind::Executable,
+    );
+
+    let mut path = path();
+    path.push(proj.root().join("path-test"));
+    let path = env::join_paths(path.iter()).unwrap();
+    let output = cargo_process("-v --list")
+        .env("PATH", &path)
+        .exec_with_output()
+        .unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
+    assert!(
+        output.contains("\n 1 "),
+        "missing 1: {}",
+        output
+    );
+}
+
+// Windows and symlinks don't currently mix well.
+#[cfg(unix)]
+#[cargo_test]
+fn list_command_resolves_symlinks() {
+    let proj = project().build();
+    let proj = fake_file(
+        proj,
+        Path::new("path-test"),
+        "cargo-2",
+        &FakeKind::Symlink {
+            target: &cargo_exe(),
+        },
+    );
+
+    let mut path = path();
+    path.push(proj.root().join("path-test"));
+    let path = env::join_paths(path.iter()).unwrap();
+    let output = cargo_process("-v --list")
+        .env("PATH", &path)
+        .exec_with_output()
+        .unwrap();
+    let output = str::from_utf8(&output.stdout).unwrap();
+    assert!(
+        output.contains("\n 2 "),
+        "missing 2: {}",
+        output
+    );
+}
+
+#[cargo_test]
+fn find_closest_biuld_to_build() {
+    cargo_process("biuld")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+error: no such subcommand: `biuld`
+
+Did you mean `build`?
+",
+        )
+        .run();
+
+    // But, if we actually have `biuld`, it must work!
+    // https://github.com/rust-lang/cargo/issues/5201
+    Package::new("cargo-biuld", "1.0.0")
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                println!("Similar, but not identical to, build");
+            }
+            "#,
+        )
+        .publish();
+
+    cargo_process("install cargo-biuld").run();
+    cargo_process("biuld")
+        .with_stdout("Similar, but not identical to, build\n")
+        .run();
+    cargo_process("--list")
+        .with_stdout_contains(
+            " build Compile a local package and all of its dependencies\n",
+        )
+        .with_stdout_contains(" biuld\n")
+        .run();
+}
+
+// If a subcommand is more than an edit distance of 3 away, we don't make a suggestion.
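+//
+// Contrast with `find_closest_biuld_to_build` above: `biuld` is within that
+// cutoff and yields
+//
+//     error: no such subcommand: `biuld`
+//     Did you mean `build`?
+//
+// while the nonsense name below gets the bare error with no suggestion.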
+#[cargo_test]
+fn find_closest_dont_correct_nonsense() {
+    cargo_process("there-is-no-way-that-there-is-a-command-close-to-this")
+        .cwd(&paths::root())
+        .with_status(101)
+        .with_stderr(
+            "[ERROR] no such subcommand: \
+         `there-is-no-way-that-there-is-a-command-close-to-this`
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn displays_subcommand_on_error() {
+    cargo_process("invalid-command")
+        .with_status(101)
+        .with_stderr("[ERROR] no such subcommand: `invalid-command`\n")
+        .run();
+}
+
+#[cargo_test]
+fn override_cargo_home() {
+    let root = paths::root();
+    let my_home = root.join("my_home");
+    fs::create_dir(&my_home).unwrap();
+    File::create(&my_home.join("config"))
+        .unwrap()
+        .write_all(
+            br#"
+            [cargo-new]
+            name = "foo"
+            email = "bar"
+            git = false
+            "#,
+        )
+        .unwrap();
+
+    cargo_process("new foo")
+        .env("USER", "foo")
+        .env("CARGO_HOME", &my_home)
+        .run();
+
+    let toml = paths::root().join("foo/Cargo.toml");
+    let mut contents = String::new();
+    File::open(&toml)
+        .unwrap()
+        .read_to_string(&mut contents)
+        .unwrap();
+    assert!(contents.contains(r#"authors = ["foo <bar>"]"#));
+}
+
+#[cargo_test]
+fn cargo_subcommand_env() {
+    let src = format!(
+        r#"
+        use std::env;
+
+        fn main() {{
+            println!("{{}}", env::var("{}").unwrap());
+        }}
+        "#,
+        cargo::CARGO_ENV
+    );
+
+    let p = project()
+        .at("cargo-envtest")
+        .file("Cargo.toml", &basic_bin_manifest("cargo-envtest"))
+        .file("src/main.rs", &src)
+        .build();
+
+    let target_dir = p.target_debug_dir();
+
+    p.cargo("build").run();
+    assert!(p.bin("cargo-envtest").is_file());
+
+    let cargo = cargo_exe().canonicalize().unwrap();
+    let mut path = path();
+    path.push(target_dir);
+    let path = env::join_paths(path.iter()).unwrap();
+
+    cargo_process("envtest")
+        .env("PATH", &path)
+        .with_stdout(cargo.to_str().unwrap())
+        .run();
+}
+
+#[cargo_test]
+fn cargo_subcommand_args() {
+    let p = project()
+        .at("cargo-foo")
+        .file("Cargo.toml", &basic_manifest("cargo-foo", "0.0.1"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                let args: Vec<_> = ::std::env::args().collect();
+                println!("{:?}", args);
+            }
+            "#,
+        )
+        .build();
+
+    p.cargo("build").run();
+    let cargo_foo_bin = p.bin("cargo-foo");
+    assert!(cargo_foo_bin.is_file());
+
+    let mut path = path();
+    path.push(p.target_debug_dir());
+    let path = env::join_paths(path.iter()).unwrap();
+
+    cargo_process("foo bar -v --help")
+        .env("PATH", &path)
+        .with_stdout(
+            r#"["[CWD]/cargo-foo/target/debug/cargo-foo[EXE]", "foo", "bar", "-v", "--help"]"#,
+        )
+        .run();
+}
+
+#[cargo_test]
+fn cargo_help() {
+    cargo_process("").run();
+    cargo_process("help").run();
+    cargo_process("-h").run();
+    cargo_process("help build").run();
+    cargo_process("build -h").run();
+    cargo_process("help help").run();
+}
+
+#[cargo_test]
+fn cargo_help_external_subcommand() {
+    Package::new("cargo-fake-help", "1.0.0")
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if ::std::env::args().nth(2) == Some(String::from("--help")) {
+                    println!("fancy help output");
+                }
+            }"#,
+        )
+        .publish();
+    cargo_process("install cargo-fake-help").run();
+    cargo_process("help fake-help")
+        .with_stdout("fancy help output\n")
+        .run();
+}
+
+#[cargo_test]
+fn explain() {
+    cargo_process("--explain E0001")
+        .with_stdout_contains(
+            "This error suggests that the expression arm corresponding to the noted pattern",
+        )
+        .run();
+}
+
+// Test that the output of `cargo -Z help` shows a different help screen with
+// all the `-Z` flags.
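+//
+// Each row of that screen pairs a `-Z` flag with a one-line description;
+// the test below matches one such row loosely:
+//
+//     -Z unstable-options -- Allow the usage of unstable options such as --registry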
+#[cargo_test] +fn z_flags_help() { + cargo_process("-Z help") + .with_stdout_contains( + " -Z unstable-options -- Allow the usage of unstable options such as --registry", + ) + .run(); +} diff --git a/tests/testsuite/cargo_features.rs b/tests/testsuite/cargo_features.rs new file mode 100644 index 00000000000..0dd4af06aa6 --- /dev/null +++ b/tests/testsuite/cargo_features.rs @@ -0,0 +1,325 @@ +use crate::support::{project, registry}; + +#[cargo_test] +fn feature_required() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the `im-a-teapot` manifest key is unstable and may not work properly in England + +Caused by: + feature `test-dummy-unstable` is required + +consider adding `cargo-features = [\"test-dummy-unstable\"]` to the manifest +", + ) + .run(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the `im-a-teapot` manifest key is unstable and may not work properly in England + +Caused by: + feature `test-dummy-unstable` is required + +this Cargo does not support nightly features, but if you +switch to nightly channel you can add +`cargo-features = [\"test-dummy-unstable\"]` to enable this feature +", + ) + .run(); +} + +#[cargo_test] +fn unknown_feature() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["foo"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + unknown cargo feature `foo` +", + ) + .run(); +} + +#[cargo_test] +fn stable_feature_warns() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-stable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build") + .with_stderr( + "\ +warning: the cargo feature `test-dummy-stable` is now stable and is no longer \ +necessary to be listed in the manifest +[COMPILING] a [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn nightly_feature_requires_nightly() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +See [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn nightly_feature_requires_nightly_in_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("a/src/lib.rs", "") + .build(); + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] a [..] +[COMPILING] b [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to load source for a dependency on `a` + +Caused by: + Unable to update [..] + +Caused by: + failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +See [..] +", + ) + .run(); +} + +#[cargo_test] +fn cant_publish() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ + but this is the `stable` channel +See [..] +", + ) + .run(); +} + +#[cargo_test] +fn z_flags_rejected() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + im-a-teapot = true + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -Zprint-im-a-teapot") + .with_status(101) + .with_stderr( + "error: the `-Z` flag is only accepted on the nightly \ + channel of Cargo, but this is the `stable` channel\n\ + See [..]", + ) + .run(); + + p.cargo("build -Zarg") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr("error: unknown `-Z` flag specified: arg") + .run(); + + p.cargo("build -Zprint-im-a-teapot") + .masquerade_as_nightly_cargo() + .with_stdout("im-a-teapot = true\n") + .with_stderr( + "\ +[COMPILING] a [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn publish_allowed() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "a" + version = "0.0.1" + authors = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("publish --index") + .arg(registry::registry_url().to_string()) + .masquerade_as_nightly_cargo() + .run(); +} diff --git a/tests/testsuite/cfg.rs b/tests/testsuite/cfg.rs new file mode 100644 index 00000000000..4f5eb2fa793 --- /dev/null +++ b/tests/testsuite/cfg.rs @@ -0,0 +1,448 @@ +use std::fmt; +use std::str::FromStr; + +use crate::support::registry::Package; +use crate::support::rustc_host; +use crate::support::{basic_manifest, project}; +use cargo::util::{Cfg, CfgExpr}; + +macro_rules! c { + ($a:ident) => { + Cfg::Name(stringify!($a).to_string()) + }; + ($a:ident = $e:expr) => { + Cfg::KeyPair(stringify!($a).to_string(), $e.to_string()) + }; +} + +macro_rules! 
e {
+    (any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*]));
+    (all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*]));
+    (not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*))));
+    (($($t:tt)*)) => (e!($($t)*));
+    ($($t:tt)*) => (CfgExpr::Value(c!($($t)*)));
+}
+
+fn good<T>(s: &str, expected: T)
+where
+    T: FromStr + PartialEq + fmt::Debug,
+    T::Err: fmt::Display,
+{
+    let c = match T::from_str(s) {
+        Ok(c) => c,
+        Err(e) => panic!("failed to parse `{}`: {}", s, e),
+    };
+    assert_eq!(c, expected);
+}
+
+fn bad<T>(s: &str, err: &str)
+where
+    T: FromStr + fmt::Display,
+    T::Err: fmt::Display,
+{
+    let e = match T::from_str(s) {
+        Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg),
+        Err(e) => e.to_string(),
+    };
+    assert!(
+        e.contains(err),
+        "when parsing `{}`,\n\"{}\" not contained \
+         inside: {}",
+        s,
+        err,
+        e
+    );
+}
+
+#[cargo_test]
+fn cfg_syntax() {
+    good("foo", c!(foo));
+    good("_bar", c!(_bar));
+    good(" foo", c!(foo));
+    good(" foo ", c!(foo));
+    good(" foo = \"bar\"", c!(foo = "bar"));
+    good("foo=\"\"", c!(foo = ""));
+    good(" foo=\"3\" ", c!(foo = "3"));
+    good("foo = \"3 e\"", c!(foo = "3 e"));
+}
+
+#[cargo_test]
+fn cfg_syntax_bad() {
+    bad::<Cfg>("", "found nothing");
+    bad::<Cfg>(" ", "found nothing");
+    bad::<Cfg>("\t", "unexpected character");
+    bad::<Cfg>("7", "unexpected character");
+    bad::<Cfg>("=", "expected identifier");
+    bad::<Cfg>(",", "expected identifier");
+    bad::<Cfg>("(", "expected identifier");
+    bad::<Cfg>("foo (", "malformed cfg value");
+    bad::<Cfg>("bar =", "expected a string");
+    bad::<Cfg>("bar = \"", "unterminated string");
+    bad::<Cfg>("foo, bar", "malformed cfg value");
+}
+
+#[cargo_test]
+fn cfg_expr() {
+    good("foo", e!(foo));
+    good("_bar", e!(_bar));
+    good(" foo", e!(foo));
+    good(" foo ", e!(foo));
+    good(" foo = \"bar\"", e!(foo = "bar"));
+    good("foo=\"\"", e!(foo = ""));
+    good(" foo=\"3\" ", e!(foo = "3"));
+    good("foo = \"3 e\"", e!(foo = "3 e"));
+
+    good("all()", e!(all()));
+    good("all(a)", e!(all(a)));
+    good("all(a, b)", e!(all(a, b)));
+    good("all(a, )", e!(all(a)));
+    good("not(a = \"b\")", e!(not(a = "b")));
+    good("not(all(a))", e!(not(all(a))));
+}
+
+#[cargo_test]
+fn cfg_expr_bad() {
+    bad::<CfgExpr>(" ", "found nothing");
+    bad::<CfgExpr>(" all", "expected `(`");
+    bad::<CfgExpr>("all(a", "expected `)`");
+    bad::<CfgExpr>("not", "expected `(`");
+    bad::<CfgExpr>("not(a", "expected `)`");
+    bad::<CfgExpr>("a = ", "expected a string");
+    bad::<CfgExpr>("all(not())", "expected identifier");
+    bad::<CfgExpr>("foo(a)", "consider using all() or any() explicitly");
+}
+
+#[cargo_test]
+fn cfg_matches() {
+    assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)]));
+    assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)]));
+    assert!(e!(any(foo, bar)).matches(&[c!(bar)]));
+    assert!(e!(any(foo, bar)).matches(&[c!(foo)]));
+    assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+    assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
+    assert!(e!(not(foo)).matches(&[c!(bar)]));
+    assert!(e!(not(foo)).matches(&[]));
+    assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)]));
+    assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)]));
+
+    assert!(!e!(foo).matches(&[]));
+    assert!(!e!(foo).matches(&[c!(bar)]));
+    assert!(!e!(foo).matches(&[c!(fo)]));
+    assert!(!e!(any(foo)).matches(&[]));
+    assert!(!e!(any(foo)).matches(&[c!(bar)]));
+    assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)]));
+    assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)]));
+    assert!(!e!(all(foo, bar)).matches(&[c!(bar)]));
+    assert!(!e!(all(foo, bar)).matches(&[c!(foo)]));
+    assert!(!e!(all(foo, bar)).matches(&[]));
+    
assert!(!e!(not(bar)).matches(&[c!(bar)])); + assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)])); + assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)])); +} + +#[cargo_test] +fn cfg_easy() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.dependencies] + b = { path = 'b' } + [target."cfg(windows)".dependencies] + b = { path = 'b' } + "#, + ) + .file("src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn dont_include() { + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.dependencies] + b = {{ path = 'b' }} + "#, + other_family + ), + ) + .file("src/lib.rs", "") + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn works_through_the_registry() { + Package::new("baz", "0.1.0").publish(); + Package::new("bar", "0.1.0") + .target_dep("baz", "0.1.0", "cfg(unix)") + .target_dep("baz", "0.1.0", "cfg(windows)") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate bar;", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] [..] index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] +[DOWNLOADED] [..] +[COMPILING] baz v0.1.0 +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn ignore_version_from_other_platform() { + let this_family = if cfg!(unix) { "unix" } else { "windows" }; + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.2.0").publish(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.dependencies] + bar = "0.1.0" + + [target.'cfg({})'.dependencies] + bar = "0.2.0" + "#, + this_family, other_family + ), + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate bar;", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] [..] index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn bad_target_spec() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.'cfg(4)'.dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + failed to parse `4` as a cfg expression + +Caused by: + unexpected character in cfg `4`, [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn bad_target_spec2() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.'cfg(bar =)'.dependencies] + baz = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + failed to parse `bar =` as a cfg expression + +Caused by: + expected a string, found nothing +", + ) + .run(); +} + +#[cargo_test] +fn multiple_match_ok() { + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.dependencies] + b = {{ path = 'b' }} + [target.'cfg(target_family = "unix")'.dependencies] + b = {{ path = 'b' }} + [target."cfg(windows)".dependencies] + b = {{ path = 'b' }} + [target.'cfg(target_family = "windows")'.dependencies] + b = {{ path = 'b' }} + [target."cfg(any(windows, unix))".dependencies] + b = {{ path = 'b' }} + + [target.{}.dependencies] + b = {{ path = 'b' }} + "#, + rustc_host() + ), + ) + .file("src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn any_ok() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target."cfg(any(windows, unix))".dependencies] + b = { path = 'b' } + "#, + ) + .file("src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + p.cargo("build -v").run(); +} + +// https://github.com/rust-lang/cargo/issues/5313 +#[cargo_test] +#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] +fn cfg_looks_at_rustflags_for_target() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(with_b)'.dependencies] + b = { path = 'b' } + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(with_b)] + extern crate b; + + fn main() { b::foo(); } + "#, + ) + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "pub fn foo() {}") + .build(); + + p.cargo("build --target x86_64-unknown-linux-gnu") + .env("RUSTFLAGS", "--cfg with_b") + .run(); +} diff --git a/tests/testsuite/cfg_features.rs b/tests/testsuite/cfg_features.rs new file mode 100644 index 00000000000..403a324686e --- /dev/null +++ b/tests/testsuite/cfg_features.rs @@ -0,0 +1,224 @@ +use crate::support::project; + +#[cargo_test] +fn syntax() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + b = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn bb() {} + "#, + ) + .build(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn include_by_param() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + c = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + #[cfg(feature = "c")] + pub const BB: usize = 1; + + pub fn bb() -> Result<(), ()> { if BB > 0 { Ok(()) } else { Err(()) } } + "#, + ) + .build(); + p.cargo(format!("build --features {}", if cfg!(unix) { "b" } else { "c" }).as_str()) + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn dont_include_by_platform() { + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.features] + b = [] + "#, + other_family + ), + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + + pub fn bb() { let _ = BB; } + "#, + ) + .build(); + p.cargo("build --features b -vv") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +#[cargo_test] +fn dont_include_by_param() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(unix)'.features] + b = [] + [target.'cfg(windows)'.features] + c = [] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + #[cfg(feature = "c")] + pub const BB: usize = 1; + + pub fn bb() -> Result<(), ()> { if BB > 0 { Ok(()) } else { Err(()) } } + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +#[cargo_test] +fn dont_include_default() { + let other_family = if cfg!(unix) { "windows" } else { "unix" }; + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg({})'.features] + b = [] + + [features] + default = ["b"] + "#, + other_family + ), + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "b")] + pub const BB: usize = 0; + + pub fn bb() { let _ = BB; } + "#, + ) + .build(); + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "\ + error[E0425]: cannot find value `BB` in this scope", + ) + .run(); +} + +// https://github.com/rust-lang/cargo/issues/5313 +#[cargo_test] +#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] +fn cfg_looks_at_rustflags_for_target() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.'cfg(with_b)'.features] + b = [] + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(with_b)] + pub const BB: usize = 0; + + fn main() { let _ = BB; } + "#, + ) + .build(); + + p.cargo("build --target x86_64-unknown-linux-gnu") + .env("RUSTFLAGS", "--cfg with_b") + .run(); +} diff --git a/tests/check-style.sh b/tests/testsuite/check-style.sh similarity index 100% rename from tests/check-style.sh rename to tests/testsuite/check-style.sh diff --git a/tests/testsuite/check.rs b/tests/testsuite/check.rs new file mode 100644 index 00000000000..6d931f34090 --- /dev/null +++ b/tests/testsuite/check.rs @@ -0,0 +1,761 @@ +use std::fmt::{self, Write}; + +use 
crate::support::install::exe; +use crate::support::paths::CargoPathExt; +use crate::support::registry::Package; +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn check_success() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + "extern crate bar; fn main() { ::bar::baz(); }", + ) + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn baz() {}") + .build(); + + foo.cargo("check").run(); +} + +#[cargo_test] +fn check_fail() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + "extern crate bar; fn main() { ::bar::baz(42); }", + ) + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn baz() {}") + .build(); + + foo.cargo("check") + .with_status(101) + .with_stderr_contains("[..]this function takes 0 parameters but 1 parameter was supplied") + .run(); +} + +#[cargo_test] +fn custom_derive() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" +#[macro_use] +extern crate bar; + +trait B { + fn b(&self); +} + +#[derive(B)] +struct A; + +fn main() { + let a = A; + a.b(); +} +"#, + ) + .build(); + let _bar = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro_derive(B)] +pub fn derive(_input: TokenStream) -> TokenStream { + format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap() +} +"#, + ) + .build(); + + foo.cargo("check").run(); +} + +#[cargo_test] +fn check_build() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + "extern crate bar; fn main() { ::bar::baz(); }", + ) + .build(); + + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn baz() {}") + .build(); + + foo.cargo("check").run(); + foo.cargo("build").run(); +} + +#[cargo_test] +fn build_check() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + "extern crate bar; fn main() { ::bar::baz(); }", + ) + .build(); + + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn baz() {}") + .build(); + + foo.cargo("build -v").run(); + foo.cargo("check -v").run(); +} + +// Checks that where a project has both a lib and a bin, the lib is only checked +// not built. +#[cargo_test] +fn issue_3418() { + let foo = project() + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + foo.cargo("check -v") + .with_stderr_contains("[..] --emit=[..]metadata [..]") + .run(); +} + +// Some weirdness that seems to be caused by a crate being built as well as +// checked, but in this case with a proc macro too. 
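+//
+// Under `cargo check` the lib is only emitted as metadata (see the
+// `--emit=[..]metadata [..]` assertion in `issue_3418` above), so the
+// `#[derive(RustcDecodable)]` expansion in the bin must resolve `Decodable`
+// through that metadata rather than through a compiled rlib.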
+#[cargo_test]
+fn issue_3419() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            rustc-serialize = "*"
+            "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            extern crate rustc_serialize;
+
+            use rustc_serialize::Decodable;
+
+            pub fn take<T: Decodable>() {}
+            "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            extern crate rustc_serialize;
+
+            extern crate foo;
+
+            #[derive(RustcDecodable)]
+            pub struct Foo;
+
+            fn main() {
+                foo::take::<Foo>();
+            }
+            "#,
+        )
+        .build();
+
+    Package::new("rustc-serialize", "1.0.0")
+        .file(
+            "src/lib.rs",
+            r#"pub trait Decodable: Sized {
+                fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error>;
+            }
+            pub trait Decoder {
+                type Error;
+                fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
+                    -> Result<T, Self::Error>
+                where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+            } "#,
+        )
+        .publish();
+
+    p.cargo("check").run();
+}
+
+// Check on a dylib should have a different metadata hash than build.
+#[cargo_test]
+fn dylib_check_preserves_build_cache() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [lib]
+            crate-type = ["dylib"]
+
+            [dependencies]
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_stderr(
+            "\
+[..]Compiling foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+
+    p.cargo("check").run();
+
+    p.cargo("build")
+        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
+        .run();
+}
+
+// test `cargo rustc --profile check`
+#[cargo_test]
+fn rustc_check() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+            "#,
+        )
+        .file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::baz(); }",
+        )
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc --profile check -- --emit=metadata").run();
+}
+
+#[cargo_test]
+fn rustc_check_err() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+            "#,
+        )
+        .file(
+            "src/main.rs",
+            "extern crate bar; fn main() { ::bar::qux(); }",
+        )
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc --profile check -- --emit=metadata")
+        .with_status(101)
+        .with_stderr_contains("[CHECKING] bar [..]")
+        .with_stderr_contains("[CHECKING] foo [..]")
+        .with_stderr_contains("[..]cannot find function `qux` in module `bar`")
+        .run();
+}
+
+#[cargo_test]
+fn check_all() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [workspace]
+            [dependencies]
+            b = { path = "b" }
+            "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file("examples/a.rs", "fn main() {}")
+        .file("tests/a.rs", "")
+        .file("src/lib.rs", "")
+        .file("b/Cargo.toml", &basic_manifest("b", "0.0.1"))
+        .file("b/src/main.rs", "fn main() {}")
+        .file("b/src/lib.rs", "")
+        .build();
+
+    p.cargo("check --all -v")
+        .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]")
+        .with_stderr_contains("[..] --crate-name foo src/main.rs [..]")
+        .with_stderr_contains("[..] --crate-name b b/src/lib.rs [..]")
+        .with_stderr_contains("[..] 
--crate-name b b/src/main.rs [..]") + .run(); +} + +#[cargo_test] +fn check_virtual_all_implied() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("check -v") + .with_stderr_contains("[..] --crate-name bar bar/src/lib.rs [..]") + .with_stderr_contains("[..] --crate-name baz baz/src/lib.rs [..]") + .run(); +} + +#[cargo_test] +fn exclude_warns_on_non_existing_package() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("check --all --exclude bar") + .with_stdout("") + .with_stderr( + r#"[WARNING] excluded package(s) bar not found in workspace `[CWD]` +[CHECKING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +"#, + ) + .run(); +} + +#[cargo_test] +fn targets_selected_default() { + let foo = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", "pub fn smth() {}") + .file("examples/example1.rs", "fn main() {}") + .file("tests/test2.rs", "#[test] fn t() {}") + .file("benches/bench3.rs", "") + .build(); + + foo.cargo("check -v") + .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]") + .with_stderr_contains("[..] --crate-name foo src/main.rs [..]") + .with_stderr_does_not_contain("[..] --crate-name example1 examples/example1.rs [..]") + .with_stderr_does_not_contain("[..] --crate-name test2 tests/test2.rs [..]") + .with_stderr_does_not_contain("[..] --crate-name bench3 benches/bench3.rs [..]") + .run(); +} + +#[cargo_test] +fn targets_selected_all() { + let foo = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", "pub fn smth() {}") + .file("examples/example1.rs", "fn main() {}") + .file("tests/test2.rs", "#[test] fn t() {}") + .file("benches/bench3.rs", "") + .build(); + + foo.cargo("check --all-targets -v") + .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]") + .with_stderr_contains("[..] --crate-name foo src/main.rs [..]") + .with_stderr_contains("[..] --crate-name example1 examples/example1.rs [..]") + .with_stderr_contains("[..] --crate-name test2 tests/test2.rs [..]") + .with_stderr_contains("[..] --crate-name bench3 benches/bench3.rs [..]") + .run(); +} + +#[cargo_test] +fn check_unit_test_profile() { + let foo = project() + .file( + "src/lib.rs", + r#" + #[cfg(test)] + mod tests { + #[test] + fn it_works() { + badtext + } + } + "#, + ) + .build(); + + foo.cargo("check").run(); + foo.cargo("check --profile test") + .with_status(101) + .with_stderr_contains("[..]badtext[..]") + .run(); +} + +// Verify what is checked with various command-line filters. 
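+//
+// Roughly, judging from the assertions below:
+//   `check`               lib + bin, normal (non-test) code only
+//   `check --tests`       lib + bin, normal and #[cfg(test)] code
+//   `check --test t1`     lib (normal) plus the `t1` integration test
+//   `check --all-targets` everything, examples and benches included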
+#[cargo_test] +fn check_filters() { + let p = project() + .file( + "src/lib.rs", + r#" + fn unused_normal_lib() {} + #[cfg(test)] + mod tests { + fn unused_unit_lib() {} + } + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + fn unused_normal_bin() {} + #[cfg(test)] + mod tests { + fn unused_unit_bin() {} + } + "#, + ) + .file( + "tests/t1.rs", + r#" + fn unused_normal_t1() {} + #[cfg(test)] + mod tests { + fn unused_unit_t1() {} + } + "#, + ) + .file( + "examples/ex1.rs", + r#" + fn main() {} + fn unused_normal_ex1() {} + #[cfg(test)] + mod tests { + fn unused_unit_ex1() {} + } + "#, + ) + .file( + "benches/b1.rs", + r#" + fn unused_normal_b1() {} + #[cfg(test)] + mod tests { + fn unused_unit_b1() {} + } + "#, + ) + .build(); + + p.cargo("check") + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_normal_bin[..]") + .with_stderr_does_not_contain("[..]unused_normal_t1[..]") + .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") + .with_stderr_does_not_contain("[..]unused_normal_b1[..]") + .with_stderr_does_not_contain("[..]unused_unit_[..]") + .run(); + p.root().join("target").rm_rf(); + p.cargo("check --tests -v") + .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --test [..]") + .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --crate-type lib [..]") + .with_stderr_contains("[..] --crate-name foo src/main.rs [..] --test [..]") + .with_stderr_contains("[..]unused_unit_lib[..]") + .with_stderr_contains("[..]unused_unit_bin[..]") + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_normal_bin[..]") + .with_stderr_contains("[..]unused_unit_t1[..]") + .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") + .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") + .with_stderr_does_not_contain("[..]unused_normal_b1[..]") + .with_stderr_does_not_contain("[..]unused_unit_b1[..]") + .with_stderr_does_not_contain("[..]--crate-type bin[..]") + .run(); + p.root().join("target").rm_rf(); + p.cargo("check --test t1 -v") + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_unit_t1[..]") + .with_stderr_does_not_contain("[..]unused_unit_lib[..]") + .with_stderr_does_not_contain("[..]unused_normal_bin[..]") + .with_stderr_does_not_contain("[..]unused_unit_bin[..]") + .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") + .with_stderr_does_not_contain("[..]unused_normal_b1[..]") + .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") + .with_stderr_does_not_contain("[..]unused_unit_b1[..]") + .run(); + p.root().join("target").rm_rf(); + p.cargo("check --all-targets -v") + .with_stderr_contains("[..]unused_normal_lib[..]") + .with_stderr_contains("[..]unused_normal_bin[..]") + .with_stderr_contains("[..]unused_normal_t1[..]") + .with_stderr_contains("[..]unused_normal_ex1[..]") + .with_stderr_contains("[..]unused_normal_b1[..]") + .with_stderr_contains("[..]unused_unit_b1[..]") + .with_stderr_contains("[..]unused_unit_t1[..]") + .with_stderr_contains("[..]unused_unit_lib[..]") + .with_stderr_contains("[..]unused_unit_bin[..]") + .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") + .run(); +} + +#[cargo_test] +fn check_artifacts() { + // Verify which artifacts are created when running check (#4059). 
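+    // Expectation: only `.rmeta` metadata under target/debug/deps; no rlibs,
+    // no executables, and no uplifted copies directly in target/debug.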
+ let p = project() + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/t1.rs", "") + .file("examples/ex1.rs", "fn main() {}") + .file("benches/b1.rs", "") + .build(); + + p.cargo("check").run(); + assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); + assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); + assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2); + + p.root().join("target").rm_rf(); + p.cargo("check --lib").run(); + assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); + assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); + assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); + + p.root().join("target").rm_rf(); + p.cargo("check --bin foo").run(); + assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); + assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); + assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2); + + p.root().join("target").rm_rf(); + p.cargo("check --test t1").run(); + assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); + assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); + assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); + assert_eq!(p.glob("target/debug/t1-*").count(), 0); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); + assert_eq!(p.glob("target/debug/deps/libt1-*.rmeta").count(), 1); + + p.root().join("target").rm_rf(); + p.cargo("check --example ex1").run(); + assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); + assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); + assert!(!p + .root() + .join("target/debug/examples") + .join(exe("ex1")) + .is_file()); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); + assert_eq!(p.glob("target/debug/examples/libex1-*.rmeta").count(), 1); + + p.root().join("target").rm_rf(); + p.cargo("check --bench b1").run(); + assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); + assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); + assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); + assert_eq!(p.glob("target/debug/b1-*").count(), 0); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); + assert_eq!(p.glob("target/debug/deps/libb1-*.rmeta").count(), 1); +} + +#[cargo_test] +fn short_message_format() { + let foo = project() + .file("src/lib.rs", "fn foo() { let _x: bool = 'a'; }") + .build(); + foo.cargo("check --message-format=short") + .with_status(101) + .with_stderr_contains( + "\ +src/lib.rs:1:27: error[E0308]: mismatched types +error: aborting due to previous error +error: Could not compile `foo`. 
+", + ) + .run(); +} + +#[cargo_test] +fn proc_macro() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "demo" + version = "0.0.1" + + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate proc_macro; + + use proc_macro::TokenStream; + + #[proc_macro_derive(Foo)] + pub fn demo(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .file( + "src/main.rs", + r#" + #[macro_use] + extern crate demo; + + #[derive(Foo)] + struct A; + + fn main() {} + "#, + ) + .build(); + p.cargo("check -v").env("CARGO_LOG", "cargo=trace").run(); +} + +#[cargo_test] +fn does_not_use_empty_rustc_wrapper() { + let p = project().file("src/lib.rs", "").build(); + p.cargo("check").env("RUSTC_WRAPPER", "").run(); +} + +#[cargo_test] +fn error_from_deep_recursion() -> Result<(), fmt::Error> { + let mut big_macro = String::new(); + writeln!(big_macro, "macro_rules! m {{")?; + for i in 0..130 { + writeln!(big_macro, "({}) => {{ m!({}); }};", i, i + 1)?; + } + writeln!(big_macro, "}}")?; + writeln!(big_macro, "m!(0);")?; + + let p = project().file("src/lib.rs", &big_macro).build(); + p.cargo("check --message-format=json") + .with_status(101) + .with_stdout_contains( + "[..]\"message\":\"recursion limit reached while expanding the macro `m`\"[..]", + ) + .run(); + + Ok(()) +} diff --git a/tests/testsuite/clean.rs b/tests/testsuite/clean.rs new file mode 100644 index 00000000000..0608c2e9985 --- /dev/null +++ b/tests/testsuite/clean.rs @@ -0,0 +1,320 @@ +use std::env; + +use crate::support::registry::Package; +use crate::support::{basic_bin_manifest, basic_manifest, git, main_file, project}; + +#[cargo_test] +fn cargo_clean_simple() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build").run(); + assert!(p.build_dir().is_dir()); + + p.cargo("clean").run(); + assert!(!p.build_dir().is_dir()); +} + +#[cargo_test] +fn different_dir() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("src/bar/a.rs", "") + .build(); + + p.cargo("build").run(); + assert!(p.build_dir().is_dir()); + + p.cargo("clean").cwd("src").with_stdout("").run(); + assert!(!p.build_dir().is_dir()); +} + +#[cargo_test] +fn clean_multiple_packages() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [[bin]] + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .file("d1/Cargo.toml", &basic_bin_manifest("d1")) + .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") + .file("d2/Cargo.toml", &basic_bin_manifest("d2")) + .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") + .build(); + + p.cargo("build -p d1 -p d2 -p foo").run(); + + let d1_path = &p + .build_dir() + .join("debug") + .join(format!("d1{}", env::consts::EXE_SUFFIX)); + let d2_path = &p + .build_dir() + .join("debug") + .join(format!("d2{}", env::consts::EXE_SUFFIX)); + + assert!(p.bin("foo").is_file()); + assert!(d1_path.is_file()); + assert!(d2_path.is_file()); + + p.cargo("clean -p d1 -p d2") + .cwd("src") + .with_stdout("") + .run(); + assert!(p.bin("foo").is_file()); + assert!(!d1_path.is_file()); + assert!(!d2_path.is_file()); +} + +#[cargo_test] +fn clean_release() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = 
"0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build --release").run(); + + p.cargo("clean -p foo").run(); + p.cargo("build --release").with_stdout("").run(); + + p.cargo("clean -p foo --release").run(); + p.cargo("build --release") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] +", + ) + .run(); + + p.cargo("build").run(); + + p.cargo("clean").arg("--release").run(); + assert!(p.build_dir().is_dir()); + assert!(p.build_dir().join("debug").is_dir()); + assert!(!p.build_dir().join("release").is_dir()); +} + +#[cargo_test] +fn clean_doc() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("doc").run(); + + let doc_path = &p.build_dir().join("doc"); + + assert!(doc_path.is_dir()); + + p.cargo("clean --doc").run(); + + assert!(!doc_path.is_dir()); + assert!(p.build_dir().is_dir()); +} + +#[cargo_test] +fn build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "build.rs", + r#" + use std::path::PathBuf; + use std::env; + + fn main() { + let out = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + if env::var("FIRST").is_ok() { + std::fs::File::create(out.join("out")).unwrap(); + } else { + assert!(!std::fs::metadata(out.join("out")).is_ok()); + } + } + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build").env("FIRST", "1").run(); + p.cargo("clean -p foo").run(); + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..]build-script-build` +[RUNNING] `rustc [..] src/main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn clean_git() { + let git = git::new("dep", |project| { + project + .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + dep = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); + p.cargo("clean -p dep").with_stdout("").run(); + p.cargo("build").run(); +} + +#[cargo_test] +fn registry() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + + p.cargo("build").run(); + p.cargo("clean -p bar").with_stdout("").run(); + p.cargo("build").run(); +} + +#[cargo_test] +fn clean_verbose() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + + p.cargo("build").run(); + p.cargo("clean -p bar --verbose") + .with_stderr( + "\ +[REMOVING] [..] +[REMOVING] [..] +[REMOVING] [..] 
+", + ) + .run(); + p.cargo("build").run(); +} + +#[cargo_test] +fn clean_remove_rlib_rmeta() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + assert!(p.target_debug_dir().join("libfoo.rlib").exists()); + let rmeta = p.glob("target/debug/deps/*.rmeta").next().unwrap().unwrap(); + assert!(rmeta.exists()); + p.cargo("clean -p foo").run(); + assert!(!p.target_debug_dir().join("libfoo.rlib").exists()); + assert!(!rmeta.exists()); +} diff --git a/tests/testsuite/clippy.rs b/tests/testsuite/clippy.rs new file mode 100644 index 00000000000..8348d2816ac --- /dev/null +++ b/tests/testsuite/clippy.rs @@ -0,0 +1,40 @@ +use crate::support::{clippy_is_available, project, registry::Package}; + +#[cargo_test] +// Clippy should never be considered fresh. +fn clippy_force_rebuild() { + if !clippy_is_available() { + return; + } + + Package::new("dep1", "0.1.0").publish(); + + // This is just a random clippy lint (assertions_on_constants) that + // hopefully won't change much in the future. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + dep1 = "0.1" + "#, + ) + .file("src/lib.rs", "pub fn f() { assert!(true); }") + .build(); + + p.cargo("clippy-preview -Zunstable-options -v") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]assert!(true)[..]") + .run(); + + // Make sure it runs again. + p.cargo("clippy-preview -Zunstable-options -v") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[FRESH] dep1 v0.1.0") + .with_stderr_contains("[..]assert!(true)[..]") + .run(); +} diff --git a/tests/testsuite/collisions.rs b/tests/testsuite/collisions.rs new file mode 100644 index 00000000000..a9a24b7bd26 --- /dev/null +++ b/tests/testsuite/collisions.rs @@ -0,0 +1,149 @@ +use crate::support::{basic_manifest, project}; +use std::env; + +#[cargo_test] +fn collision_dylib() { + // Path dependencies don't include metadata hash in filename for dylibs. + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "1.0.0" + + [lib] + crate-type = ["dylib"] + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "1.0.0" + + [lib] + crate-type = ["dylib"] + name = "a" + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + // `j=1` is required because on Windows you'll get an error due to + // two processes writing to the file at the same time. + p.cargo("build -j=1") + .with_stderr_contains(&format!("\ +[WARNING] output filename collision. +The lib target `a` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the lib target `a` in package `a v1.0.0 ([..]/foo/a)`. +Colliding filename is: [..]/foo/target/debug/deps/{}a{} +The targets should have unique names. +Consider changing their names to be unique or compiling them separately. +This may become a hard error in the future; see . +", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX)) + .run(); +} + +#[cargo_test] +fn collision_example() { + // Examples in a workspace can easily collide. 
+ let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "1.0.0")) + .file("a/examples/ex1.rs", "fn main() {}") + .file("b/Cargo.toml", &basic_manifest("b", "1.0.0")) + .file("b/examples/ex1.rs", "fn main() {}") + .build(); + + p.cargo("build --examples") + .with_stderr_contains("\ +[WARNING] output filename collision. +The example target `ex1` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the example target `ex1` in package `a v1.0.0 ([..]/foo/a)`. +Colliding filename is: [..]/foo/target/debug/examples/ex1[EXE] +The targets should have unique names. +Consider changing their names to be unique or compiling them separately. +This may become a hard error in the future; see . +") + .run(); +} + +#[cargo_test] +fn collision_export() { + // `--out-dir` combines some things which can cause conflicts. + let p = project() + .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) + .file("examples/foo.rs", "fn main() {}") + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --out-dir=out -Z unstable-options --bins --examples") + .masquerade_as_nightly_cargo() + .with_stderr_contains("\ +[WARNING] `--out-dir` filename collision. +The example target `foo` in package `foo v1.0.0 ([..]/foo)` has the same output filename as the bin target `foo` in package `foo v1.0.0 ([..]/foo)`. +Colliding filename is: [..]/foo/out/foo[EXE] +The exported filenames should be unique. +Consider changing their names to be unique or compiling them separately. +This may become a hard error in the future; see . +") + .run(); +} + +#[cargo_test] +fn collision_doc() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + foo2 = { path = "foo2" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo2/Cargo.toml", + r#" + [package] + name = "foo2" + version = "0.1.0" + + [lib] + name = "foo" + "#, + ) + .file("foo2/src/lib.rs", "") + .build(); + + p.cargo("doc") + .with_stderr_contains( + "\ +[WARNING] output filename collision. +The lib target `foo` in package `foo2 v0.1.0 ([..]/foo/foo2)` has the same output \ +filename as the lib target `foo` in package `foo v0.1.0 ([..]/foo)`. +Colliding filename is: [..]/foo/target/doc/foo/index.html +The targets should have unique names. +Consider changing their names to be unique or compiling them separately. +This may become a hard error in the future; see . 
+", + ) + .run(); +} diff --git a/tests/testsuite/concurrent.rs b/tests/testsuite/concurrent.rs new file mode 100644 index 00000000000..3528fb031f2 --- /dev/null +++ b/tests/testsuite/concurrent.rs @@ -0,0 +1,531 @@ +use std::fs::{self, File}; +use std::io::Write; +use std::net::TcpListener; +use std::process::Stdio; +use std::sync::mpsc::channel; +use std::thread; +use std::{env, str}; + +use crate::support::cargo_process; +use crate::support::git; +use crate::support::install::{assert_has_installed_exe, cargo_home}; +use crate::support::registry::Package; +use crate::support::{basic_manifest, execs, project, slow_cpu_multiplier}; +use git2; + +fn pkg(name: &str, vers: &str) { + Package::new(name, vers) + .file("src/main.rs", "fn main() {{}}") + .publish(); +} + +#[cargo_test] +fn multiple_installs() { + let p = project() + .no_manifest() + .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0")) + .file("a/src/main.rs", "fn main() {}") + .file("b/Cargo.toml", &basic_manifest("bar", "0.0.0")) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("install").cwd("a").build_command(); + let mut b = p.cargo("install").cwd("b").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + execs().run_output(&a); + execs().run_output(&b); + + assert_has_installed_exe(cargo_home(), "foo"); + assert_has_installed_exe(cargo_home(), "bar"); +} + +#[cargo_test] +fn concurrent_installs() { + const LOCKED_BUILD: &str = "waiting for file lock on build directory"; + + pkg("foo", "0.0.1"); + pkg("bar", "0.0.1"); + + let mut a = cargo_process("install foo").build_command(); + let mut b = cargo_process("install bar").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + assert!(!str::from_utf8(&a.stderr).unwrap().contains(LOCKED_BUILD)); + assert!(!str::from_utf8(&b.stderr).unwrap().contains(LOCKED_BUILD)); + + execs().run_output(&a); + execs().run_output(&b); + + assert_has_installed_exe(cargo_home(), "foo"); + assert_has_installed_exe(cargo_home(), "bar"); +} + +#[cargo_test] +fn one_install_should_be_bad() { + let p = project() + .no_manifest() + .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0")) + .file("a/src/main.rs", "fn main() {}") + .file("b/Cargo.toml", &basic_manifest("foo", "0.0.0")) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("install").cwd("a").build_command(); + let mut b = p.cargo("install").cwd("b").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + let (bad, good) = if a.status.code() == Some(101) { + (a, b) + } else { + (b, a) + }; + execs() + .with_status(101) + .with_stderr_contains( + "[ERROR] binary `foo[..]` already exists in destination as part of `[..]`", + ) + .run_output(&bad); + execs() + .with_stderr_contains("warning: 
be sure to add `[..]` to your PATH [..]") + .run_output(&good); + + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn multiple_registry_fetches() { + let mut pkg = Package::new("bar", "1.0.2"); + for i in 0..10 { + let name = format!("foo{}", i); + Package::new(&name, "1.0.0").publish(); + pkg.dep(&name, "*"); + } + pkg.publish(); + + let p = project() + .no_manifest() + .file( + "a/Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + bar = "*" + "#, + ) + .file("a/src/main.rs", "fn main() {}") + .file( + "b/Cargo.toml", + r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + bar = "*" + "#, + ) + .file("b/src/main.rs", "fn main() {}"); + let p = p.build(); + + let mut a = p.cargo("build").cwd("a").build_command(); + let mut b = p.cargo("build").cwd("b").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + execs().run_output(&a); + execs().run_output(&b); + + let suffix = env::consts::EXE_SUFFIX; + assert!(p + .root() + .join("a/target/debug") + .join(format!("foo{}", suffix)) + .is_file()); + assert!(p + .root() + .join("b/target/debug") + .join(format!("bar{}", suffix)) + .is_file()); +} + +#[cargo_test] +fn git_same_repo_different_tags() { + let a = git::new("dep", |project| { + project + .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) + .file("src/lib.rs", "pub fn tag1() {}") + }) + .unwrap(); + + let repo = git2::Repository::open(&a.root()).unwrap(); + git::tag(&repo, "tag1"); + + File::create(a.root().join("src/lib.rs")) + .unwrap() + .write_all(b"pub fn tag2() {}") + .unwrap(); + git::add(&repo); + git::commit(&repo); + git::tag(&repo, "tag2"); + + let p = project() + .no_manifest() + .file( + "a/Cargo.toml", + &format!( + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}', tag = 'tag1' }} + "#, + a.url() + ), + ) + .file( + "a/src/main.rs", + "extern crate dep; fn main() { dep::tag1(); }", + ) + .file( + "b/Cargo.toml", + &format!( + r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}', tag = 'tag2' }} + "#, + a.url() + ), + ) + .file( + "b/src/main.rs", + "extern crate dep; fn main() { dep::tag2(); }", + ); + let p = p.build(); + + let mut a = p.cargo("build -v").cwd("a").build_command(); + let mut b = p.cargo("build -v").cwd("b").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + execs().run_output(&a); + execs().run_output(&b); +} + +#[cargo_test] +fn git_same_branch_different_revs() { + let a = git::new("dep", |project| { + project + .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) + .file("src/lib.rs", "pub fn f1() {}") + }) + .unwrap(); + + let p = project() + .no_manifest() + .file( + "a/Cargo.toml", + &format!( + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file( + "a/src/main.rs", + "extern crate dep; fn main() { dep::f1(); }", + ) + 
.file( + "b/Cargo.toml", + &format!( + r#" + [package] + name = "bar" + authors = [] + version = "0.0.0" + + [dependencies] + dep = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file( + "b/src/main.rs", + "extern crate dep; fn main() { dep::f2(); }", + ); + let p = p.build(); + + // Generate a Cargo.lock pointing at the current rev, then clear out the + // target directory + p.cargo("build").cwd("a").run(); + fs::remove_dir_all(p.root().join("a/target")).unwrap(); + + // Make a new commit on the master branch + let repo = git2::Repository::open(&a.root()).unwrap(); + File::create(a.root().join("src/lib.rs")) + .unwrap() + .write_all(b"pub fn f2() {}") + .unwrap(); + git::add(&repo); + git::commit(&repo); + + // Now run both builds in parallel. The build of `b` should pick up the + // newest commit while the build of `a` should use the locked old commit. + let mut a = p.cargo("build").cwd("a").build_command(); + let mut b = p.cargo("build").cwd("b").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + execs().run_output(&a); + execs().run_output(&b); +} + +#[cargo_test] +fn same_project() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", ""); + let p = p.build(); + + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").build_command(); + + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + execs().run_output(&a); + execs().run_output(&b); +} + +// Make sure that if Cargo dies while holding a lock that it's released and the +// next Cargo to come in will take over cleanly. +// older win versions don't support job objects, so skip test there +#[cargo_test] +#[cfg_attr(target_os = "windows", ignore)] +fn killing_cargo_releases_the_lock() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + build = "build.rs" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "build.rs", + r#" + use std::net::TcpStream; + + fn main() { + if std::env::var("A").is_ok() { + TcpStream::connect(&std::env::var("ADDR").unwrap()[..]) + .unwrap(); + std::thread::sleep(std::time::Duration::new(10, 0)); + } + } + "#, + ); + let p = p.build(); + + // Our build script will connect to our local TCP socket to inform us that + // it's started and that's how we know that `a` will have the lock + // when we kill it. + let l = TcpListener::bind("127.0.0.1:0").unwrap(); + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build").build_command(); + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + a.env("ADDR", l.local_addr().unwrap().to_string()) + .env("A", "a"); + b.env("ADDR", l.local_addr().unwrap().to_string()) + .env_remove("A"); + + // Spawn `a`, wait for it to get to the build script (at which point the + // lock is held), then kill it. + let mut a = a.spawn().unwrap(); + l.accept().unwrap(); + a.kill().unwrap(); + + // Spawn `b`, then just finish the output of a/b the same way the above + // tests does. 
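+    // Since the OS drops `a`'s file locks when the process dies, `b` should
+    // be able to take the lock and finish; hanging here would mean the lock
+    // outlived its owner.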
+ let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + // We killed `a`, so it shouldn't succeed, but `b` should have succeeded. + assert!(!a.status.success()); + execs().run_output(&b); +} + +#[cargo_test] +fn debug_release_ok() { + let p = project().file("src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build").run(); + fs::remove_dir_all(p.root().join("target")).unwrap(); + + let mut a = p.cargo("build").build_command(); + let mut b = p.cargo("build --release").build_command(); + a.stdout(Stdio::piped()).stderr(Stdio::piped()); + b.stdout(Stdio::piped()).stderr(Stdio::piped()); + let a = a.spawn().unwrap(); + let b = b.spawn().unwrap(); + let a = thread::spawn(move || a.wait_with_output().unwrap()); + let b = b.wait_with_output().unwrap(); + let a = a.join().unwrap(); + + execs() + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run_output(&a); + execs() + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 [..] +[FINISHED] release [optimized] target(s) in [..] +", + ) + .run_output(&b); +} + +#[cargo_test] +fn no_deadlock_with_git_dependencies() { + let dep1 = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let dep2 = git::new("dep2", |project| { + project + .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + authors = [] + version = "0.0.0" + + [dependencies] + dep1 = {{ git = '{}' }} + dep2 = {{ git = '{}' }} + "#, + dep1.url(), + dep2.url() + ), + ) + .file("src/main.rs", "fn main() { }"); + let p = p.build(); + + let n_concurrent_builds = 5; + + let (tx, rx) = channel(); + for _ in 0..n_concurrent_builds { + let cmd = p + .cargo("build") + .build_command() + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn(); + let tx = tx.clone(); + thread::spawn(move || { + let result = cmd.unwrap().wait_with_output().unwrap(); + tx.send(result).unwrap() + }); + } + + for _ in 0..n_concurrent_builds { + let result = rx.recv_timeout(slow_cpu_multiplier(30)).expect("Deadlock!"); + execs().run_output(&result); + } +} diff --git a/tests/testsuite/config.rs b/tests/testsuite/config.rs new file mode 100644 index 00000000000..01f79271a92 --- /dev/null +++ b/tests/testsuite/config.rs @@ -0,0 +1,701 @@ +use std::borrow::Borrow; +use std::collections; +use std::fs; + +use crate::support::{paths, project}; +use cargo::core::{enable_nightly_features, Shell}; +use cargo::util::config::{self, Config}; +use cargo::util::toml::{self, VecStringOrBool as VSOB}; +use serde::Deserialize; + +fn lines_match(a: &str, b: &str) -> bool { + // Perform a small amount of normalization for filesystem paths before we + // send this to the `lines_match` function. 
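+    // Normalizing `\` to `/` lets a single expected string match paths as
+    // printed on both Windows and Unix.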
+    crate::support::lines_match(&a.replace("\\", "/"), &b.replace("\\", "/"))
+}
+
+#[cargo_test]
+fn read_env_vars_for_config() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.0"
+            build = "build.rs"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            fn main() {
+                assert_eq!(env::var("NUM_JOBS").unwrap(), "100");
+            }
+            "#,
+        )
+        .build();
+
+    p.cargo("build").env("CARGO_BUILD_JOBS", "100").run();
+}
+
+fn write_config(config: &str) {
+    let path = paths::root().join(".cargo/config");
+    fs::create_dir_all(path.parent().unwrap()).unwrap();
+    fs::write(path, config).unwrap();
+}
+
+fn new_config(env: &[(&str, &str)]) -> Config {
+    enable_nightly_features(); // -Z advanced-env
+    let output = Box::new(fs::File::create(paths::root().join("shell.out")).unwrap());
+    let shell = Shell::from_write(output);
+    let cwd = paths::root();
+    let homedir = paths::home();
+    let env = env
+        .iter()
+        .map(|(k, v)| (k.to_string(), v.to_string()))
+        .collect();
+    let mut config = Config::new(shell, cwd, homedir);
+    config.set_env(env);
+    config
+        .configure(
+            0,
+            None,
+            &None,
+            false,
+            false,
+            false,
+            &None,
+            &["advanced-env".into()],
+        )
+        .unwrap();
+    config
+}
+
+fn assert_error<E: Borrow<failure::Error>>(error: E, msgs: &str) {
+    let causes = error
+        .borrow()
+        .iter_chain()
+        .map(|e| e.to_string())
+        .collect::<Vec<_>>()
+        .join("\n");
+    if !lines_match(msgs, &causes) {
+        panic!(
+            "Did not find expected:\n{}\nActual error:\n{}\n",
+            msgs, causes
+        );
+    }
+}
+
+#[cargo_test]
+fn get_config() {
+    write_config(
+        "\
+[S]
+f1 = 123
+",
+    );
+
+    let config = new_config(&[]);
+
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct S {
+        f1: Option<i64>,
+    }
+    let s: S = config.get("S").unwrap();
+    assert_eq!(s, S { f1: Some(123) });
+    let config = new_config(&[("CARGO_S_F1", "456")]);
+    let s: S = config.get("S").unwrap();
+    assert_eq!(s, S { f1: Some(456) });
+}
+
+#[cargo_test]
+fn config_unused_fields() {
+    write_config(
+        "\
+[S]
+unused = 456
+",
+    );
+
+    let config = new_config(&[("CARGO_S_UNUSED2", "1"), ("CARGO_S2_UNUSED", "2")]);
+
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct S {
+        f1: Option<i64>,
+    }
+    // This prints a warning (verified below).
+    let s: S = config.get("S").unwrap();
+    assert_eq!(s, S { f1: None });
+    // This does not print anything; we cannot easily/reliably warn for
+    // environment variables.
+    let s: S = config.get("S2").unwrap();
+    assert_eq!(s, S { f1: None });
+
+    // Verify the warnings.
+    drop(config); // Paranoid about flushing the file.
+    let path = paths::root().join("shell.out");
+    let output = fs::read_to_string(path).unwrap();
+    let expected = "\
+warning: unused key `S.unused` in config file `[..]/.cargo/config`
+";
+    if !lines_match(expected, &output) {
+        panic!(
+            "Did not find expected:\n{}\nActual error:\n{}\n",
+            expected, output
+        );
+    }
+}
+
+#[cargo_test]
+fn config_load_toml_profile() {
+    write_config(
+        "\
+[profile.dev]
+opt-level = 's'
+lto = true
+codegen-units=4
+debug = true
+debug-assertions = true
+rpath = true
+panic = 'abort'
+overflow-checks = true
+incremental = true
+
+[profile.dev.build-override]
+opt-level = 1
+
+[profile.dev.overrides.bar]
+codegen-units = 9
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_PROFILE_DEV_CODEGEN_UNITS", "5"),
+        ("CARGO_PROFILE_DEV_BUILD_OVERRIDE_CODEGEN_UNITS", "11"),
+        ("CARGO_PROFILE_DEV_OVERRIDES_env_CODEGEN_UNITS", "13"),
+        ("CARGO_PROFILE_DEV_OVERRIDES_bar_OPT_LEVEL", "2"),
+    ]);
+
+    // TODO: don't use actual `tomlprofile`.
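+    // Environment variables layer over the config file, so
+    // CARGO_PROFILE_DEV_CODEGEN_UNITS=5 is expected to win over the
+    // `codegen-units=4` written above.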
+ let p: toml::TomlProfile = config.get("profile.dev").unwrap(); + let mut overrides = collections::BTreeMap::new(); + let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("bar").unwrap()); + let o_profile = toml::TomlProfile { + opt_level: Some(toml::TomlOptLevel("2".to_string())), + lto: None, + codegen_units: Some(9), + debug: None, + debug_assertions: None, + rpath: None, + panic: None, + overflow_checks: None, + incremental: None, + overrides: None, + build_override: None, + }; + overrides.insert(key, o_profile); + let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("env").unwrap()); + let o_profile = toml::TomlProfile { + opt_level: None, + lto: None, + codegen_units: Some(13), + debug: None, + debug_assertions: None, + rpath: None, + panic: None, + overflow_checks: None, + incremental: None, + overrides: None, + build_override: None, + }; + overrides.insert(key, o_profile); + + assert_eq!( + p, + toml::TomlProfile { + opt_level: Some(toml::TomlOptLevel("s".to_string())), + lto: Some(toml::StringOrBool::Bool(true)), + codegen_units: Some(5), + debug: Some(toml::U32OrBool::Bool(true)), + debug_assertions: Some(true), + rpath: Some(true), + panic: Some("abort".to_string()), + overflow_checks: Some(true), + incremental: Some(true), + overrides: Some(overrides), + build_override: Some(Box::new(toml::TomlProfile { + opt_level: Some(toml::TomlOptLevel("1".to_string())), + lto: None, + codegen_units: Some(11), + debug: None, + debug_assertions: None, + rpath: None, + panic: None, + overflow_checks: None, + incremental: None, + overrides: None, + build_override: None + })) + } + ); +} + +#[cargo_test] +fn config_deserialize_any() { + // Some tests to exercise deserialize_any for deserializers that need to + // be told the format. 
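+    // `VecStringOrBool` (aliased as `VSOB` in the imports) is either a TOML
+    // boolean or a list of strings; note how file and environment lists are
+    // expected to merge for key `c` below.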
+    write_config(
+        "\
+a = true
+b = ['b']
+c = ['c']
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_ENVB", "false"),
+        ("CARGO_C", "['d']"),
+        ("CARGO_ENVL", "['a', 'b']"),
+    ]);
+
+    let a = config.get::<VSOB>("a").unwrap();
+    match a {
+        VSOB::VecString(_) => panic!("expected bool"),
+        VSOB::Bool(b) => assert_eq!(b, true),
+    }
+    let b = config.get::<VSOB>("b").unwrap();
+    match b {
+        VSOB::VecString(l) => assert_eq!(l, vec!["b".to_string()]),
+        VSOB::Bool(_) => panic!("expected list"),
+    }
+    let c = config.get::<VSOB>("c").unwrap();
+    match c {
+        VSOB::VecString(l) => assert_eq!(l, vec!["c".to_string(), "d".to_string()]),
+        VSOB::Bool(_) => panic!("expected list"),
+    }
+    let envb = config.get::<VSOB>("envb").unwrap();
+    match envb {
+        VSOB::VecString(_) => panic!("expected bool"),
+        VSOB::Bool(b) => assert_eq!(b, false),
+    }
+    let envl = config.get::<VSOB>("envl").unwrap();
+    match envl {
+        VSOB::VecString(l) => assert_eq!(l, vec!["a".to_string(), "b".to_string()]),
+        VSOB::Bool(_) => panic!("expected list"),
+    }
+}
+
+#[cargo_test]
+fn config_toml_errors() {
+    write_config(
+        "\
+[profile.dev]
+opt-level = 'foo'
+",
+    );
+
+    let config = new_config(&[]);
+
+    assert_error(
+        config.get::<toml::TomlProfile>("profile.dev").unwrap_err(),
+        "error in [..]/.cargo/config: \
+         could not load config key `profile.dev.opt-level`: \
+         must be an integer, `z`, or `s`, but found: foo",
+    );
+
+    let config = new_config(&[("CARGO_PROFILE_DEV_OPT_LEVEL", "asdf")]);
+
+    assert_error(
+        config.get::<toml::TomlProfile>("profile.dev").unwrap_err(),
+        "error in environment variable `CARGO_PROFILE_DEV_OPT_LEVEL`: \
+         could not load config key `profile.dev.opt-level`: \
+         must be an integer, `z`, or `s`, but found: asdf",
+    );
+}
+
+#[cargo_test]
+fn load_nested() {
+    write_config(
+        "\
+[nest.foo]
+f1 = 1
+f2 = 2
+[nest.bar]
+asdf = 3
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_NEST_foo_f2", "3"),
+        ("CARGO_NESTE_foo_f1", "1"),
+        ("CARGO_NESTE_foo_f2", "3"),
+        ("CARGO_NESTE_bar_asdf", "3"),
+    ]);
+
+    type Nested = collections::HashMap<String, collections::HashMap<String, u8>>;
+
+    let n: Nested = config.get("nest").unwrap();
+    let mut expected = collections::HashMap::new();
+    let mut foo = collections::HashMap::new();
+    foo.insert("f1".to_string(), 1);
+    foo.insert("f2".to_string(), 3);
+    expected.insert("foo".to_string(), foo);
+    let mut bar = collections::HashMap::new();
+    bar.insert("asdf".to_string(), 3);
+    expected.insert("bar".to_string(), bar);
+    assert_eq!(n, expected);
+
+    let n: Nested = config.get("neste").unwrap();
+    assert_eq!(n, expected);
+}
+
+#[cargo_test]
+fn get_errors() {
+    write_config(
+        "\
+[S]
+f1 = 123
+f2 = 'asdf'
+big = 123456789
+",
+    );
+
+    let config = new_config(&[("CARGO_E_S", "asdf"), ("CARGO_E_BIG", "123456789")]);
+    assert_error(
+        config.get::<i64>("foo").unwrap_err(),
+        "missing config key `foo`",
+    );
+    assert_error(
+        config.get::<i64>("foo.bar").unwrap_err(),
+        "missing config key `foo.bar`",
+    );
+    assert_error(
+        config.get::<i64>("S.f2").unwrap_err(),
+        "error in [..]/.cargo/config: `S.f2` expected an integer, but found a string",
+    );
+    assert_error(
+        config.get::<u8>("S.big").unwrap_err(),
+        "error in [..].cargo/config: could not load config key `S.big`: \
+         invalid value: integer `123456789`, expected u8",
+    );
+
+    // Environment variable type errors.
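+    // A key like `e.s` maps to the `CARGO_E_S` environment variable, and
+    // errors from env-sourced values should name the variable rather than a
+    // config-file path.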
+    assert_error(
+        config.get::<i64>("e.s").unwrap_err(),
+        "error in environment variable `CARGO_E_S`: invalid digit found in string",
+    );
+    assert_error(
+        config.get::<i8>("e.big").unwrap_err(),
+        "error in environment variable `CARGO_E_BIG`: \
+         could not load config key `e.big`: \
+         invalid value: integer `123456789`, expected i8",
+    );
+
+    #[derive(Debug, Deserialize)]
+    struct S {
+        f1: i64,
+        f2: String,
+        f3: i64,
+        big: i64,
+    }
+    assert_error(
+        config.get::<S>("S").unwrap_err(),
+        "missing config key `S.f3`",
+    );
+}
+
+#[cargo_test]
+fn config_get_option() {
+    write_config(
+        "\
+[foo]
+f1 = 1
+",
+    );
+
+    let config = new_config(&[("CARGO_BAR_ASDF", "3")]);
+
+    assert_eq!(config.get::<Option<i32>>("a").unwrap(), None);
+    assert_eq!(config.get::<Option<i32>>("a.b").unwrap(), None);
+    assert_eq!(config.get::<Option<i32>>("foo.f1").unwrap(), Some(1));
+    assert_eq!(config.get::<Option<i32>>("bar.asdf").unwrap(), Some(3));
+    assert_eq!(config.get::<Option<i32>>("bar.zzzz").unwrap(), None);
+}
+
+#[cargo_test]
+fn config_bad_toml() {
+    write_config("asdf");
+    let config = new_config(&[]);
+    assert_error(
+        config.get::<i32>("foo").unwrap_err(),
+        "\
+could not load Cargo configuration
+Caused by:
+  could not parse TOML configuration in `[..]/.cargo/config`
+Caused by:
+  could not parse input as TOML
+Caused by:
+  expected an equals, found eof at line 1",
+    );
+}
+
+#[cargo_test]
+fn config_get_list() {
+    write_config(
+        "\
+l1 = []
+l2 = ['one', 'two']
+l3 = 123
+l4 = ['one', 'two']
+
+[nested]
+l = ['x']
+
+[nested2]
+l = ['y']
+
+[nested-empty]
+",
+    );
+
+    type L = Vec<String>;
+
+    let config = new_config(&[
+        ("CARGO_L4", "['three', 'four']"),
+        ("CARGO_L5", "['a']"),
+        ("CARGO_ENV_EMPTY", "[]"),
+        ("CARGO_ENV_BLANK", ""),
+        ("CARGO_ENV_NUM", "1"),
+        ("CARGO_ENV_NUM_LIST", "[1]"),
+        ("CARGO_ENV_TEXT", "asdf"),
+        ("CARGO_LEPAIR", "['a', 'b']"),
+        ("CARGO_NESTED2_L", "['z']"),
+        ("CARGO_NESTEDE_L", "['env']"),
+        ("CARGO_BAD_ENV", "[zzz]"),
+    ]);
+
+    assert_eq!(config.get::<L>("unset").unwrap(), vec![] as Vec<String>);
+    assert_eq!(config.get::<L>("l1").unwrap(), vec![] as Vec<String>);
+    assert_eq!(config.get::<L>("l2").unwrap(), vec!["one", "two"]);
+    assert_error(
+        config.get::<L>("l3").unwrap_err(),
+        "\
+invalid configuration for key `l3`
+expected a list, but found a integer for `l3` in [..]/.cargo/config",
+    );
+    assert_eq!(
+        config.get::<L>("l4").unwrap(),
+        vec!["one", "two", "three", "four"]
+    );
+    assert_eq!(config.get::<L>("l5").unwrap(), vec!["a"]);
+    assert_eq!(config.get::<L>("env-empty").unwrap(), vec![] as Vec<String>);
+    assert_error(
+        config.get::<L>("env-blank").unwrap_err(),
+        "error in environment variable `CARGO_ENV_BLANK`: \
+         should have TOML list syntax, found ``",
+    );
+    assert_error(
+        config.get::<L>("env-num").unwrap_err(),
+        "error in environment variable `CARGO_ENV_NUM`: \
+         should have TOML list syntax, found `1`",
+    );
+    assert_error(
+        config.get::<L>("env-num-list").unwrap_err(),
+        "error in environment variable `CARGO_ENV_NUM_LIST`: \
+         expected string, found integer",
+    );
+    assert_error(
+        config.get::<L>("env-text").unwrap_err(),
+        "error in environment variable `CARGO_ENV_TEXT`: \
+         should have TOML list syntax, found `asdf`",
+    );
+    // "invalid number" here isn't the best error, but I think it's just toml.rs.
+    assert_error(
+        config.get::<L>("bad-env").unwrap_err(),
+        "error in environment variable `CARGO_BAD_ENV`: \
+         could not parse TOML list: invalid number at line 1",
+    );
+
+    // Try some other sequence-like types.
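+    // Presumably any serde sequence type can stand in for `Vec<String>`
+    // here, tuples and tuple structs included.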
+    assert_eq!(
+        config
+            .get::<(String, String, String, String)>("l4")
+            .unwrap(),
+        (
+            "one".to_string(),
+            "two".to_string(),
+            "three".to_string(),
+            "four".to_string()
+        )
+    );
+    assert_eq!(config.get::<(String,)>("l5").unwrap(), ("a".to_string(),));
+
+    // Tuple struct
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct TupS(String, String);
+    assert_eq!(
+        config.get::<TupS>("lepair").unwrap(),
+        TupS("a".to_string(), "b".to_string())
+    );
+
+    // Nested with an option.
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct S {
+        l: Option<Vec<String>>,
+    }
+    assert_eq!(config.get::<S>("nested-empty").unwrap(), S { l: None });
+    assert_eq!(
+        config.get::<S>("nested").unwrap(),
+        S {
+            l: Some(vec!["x".to_string()]),
+        }
+    );
+    assert_eq!(
+        config.get::<S>("nested2").unwrap(),
+        S {
+            l: Some(vec!["y".to_string(), "z".to_string()]),
+        }
+    );
+    assert_eq!(
+        config.get::<S>("nestede").unwrap(),
+        S {
+            l: Some(vec!["env".to_string()]),
+        }
+    );
+}
+
+#[cargo_test]
+fn config_get_other_types() {
+    write_config(
+        "\
+ns = 123
+ns2 = 456
+",
+    );
+
+    let config = new_config(&[("CARGO_NSE", "987"), ("CARGO_NS2", "654")]);
+
+    #[derive(Debug, Deserialize, Eq, PartialEq)]
+    struct NewS(i32);
+    assert_eq!(config.get::<NewS>("ns").unwrap(), NewS(123));
+    assert_eq!(config.get::<NewS>("ns2").unwrap(), NewS(654));
+    assert_eq!(config.get::<NewS>("nse").unwrap(), NewS(987));
+    assert_error(
+        config.get::<NewS>("unset").unwrap_err(),
+        "missing config key `unset`",
+    );
+}
+
+#[cargo_test]
+fn config_relative_path() {
+    write_config(&format!(
+        "\
+p1 = 'foo/bar'
+p2 = '../abc'
+p3 = 'b/c'
+abs = '{}'
+",
+        paths::home().display(),
+    ));
+
+    let config = new_config(&[("CARGO_EPATH", "a/b"), ("CARGO_P3", "d/e")]);
+
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("p1")
+            .unwrap()
+            .path(),
+        paths::root().join("foo/bar")
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("p2")
+            .unwrap()
+            .path(),
+        paths::root().join("../abc")
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("p3")
+            .unwrap()
+            .path(),
+        paths::root().join("d/e")
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("abs")
+            .unwrap()
+            .path(),
+        paths::home()
+    );
+    assert_eq!(
+        config
+            .get::<config::ConfigRelativePath>("epath")
+            .unwrap()
+            .path(),
+        paths::root().join("a/b")
+    );
+}
+
+#[cargo_test]
+fn config_get_integers() {
+    write_config(
+        "\
+npos = 123456789
+nneg = -123456789
+i64max = 9223372036854775807
+",
+    );
+
+    let config = new_config(&[
+        ("CARGO_EPOS", "123456789"),
+        ("CARGO_ENEG", "-1"),
+        ("CARGO_EI64MAX", "9223372036854775807"),
+    ]);
+
+    assert_eq!(
+        config.get::<u64>("i64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+    assert_eq!(
+        config.get::<i64>("i64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+    assert_eq!(
+        config.get::<u64>("ei64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+    assert_eq!(
+        config.get::<i64>("ei64max").unwrap(),
+        9_223_372_036_854_775_807
+    );
+
+    assert_error(
+        config.get::<u32>("nneg").unwrap_err(),
+        "error in [..].cargo/config: \
+         could not load config key `nneg`: \
+         invalid value: integer `-123456789`, expected u32",
+    );
+    assert_error(
+        config.get::<u32>("eneg").unwrap_err(),
+        "error in environment variable `CARGO_ENEG`: \
+         could not load config key `eneg`: \
+         invalid value: integer `-1`, expected u32",
+    );
+    assert_error(
+        config.get::<i8>("npos").unwrap_err(),
+        "error in [..].cargo/config: \
+         could not load config key `npos`: \
+         invalid value: integer `123456789`, expected i8",
+    );
+    assert_error(
+        config.get::<i8>("epos").unwrap_err(),
+        "error in environment variable `CARGO_EPOS`: \
+         could not load config key `epos`: \
+         invalid value: integer `123456789`, expected i8",
+    );
+}
+diff --git 
a/tests/testsuite/corrupt_git.rs b/tests/testsuite/corrupt_git.rs new file mode 100644 index 00000000000..2f5223303fb --- /dev/null +++ b/tests/testsuite/corrupt_git.rs @@ -0,0 +1,160 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use crate::support::paths; +use crate::support::{basic_manifest, git, project}; +use cargo::util::paths as cargopaths; + +#[cargo_test] +fn deleting_database_files() { + let project = project(); + let git_project = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + project.cargo("build").run(); + + let mut files = Vec::new(); + find_files(&paths::home().join(".cargo/git/db"), &mut files); + assert!(!files.is_empty()); + + let log = "cargo::sources::git=trace"; + for file in files { + if !file.exists() { + continue; + } + println!("deleting {}", file.display()); + cargopaths::remove_file(&file).unwrap(); + project.cargo("build -v").env("CARGO_LOG", log).run(); + + if !file.exists() { + continue; + } + println!("truncating {}", file.display()); + make_writable(&file); + fs::OpenOptions::new() + .write(true) + .open(&file) + .unwrap() + .set_len(2) + .unwrap(); + project.cargo("build -v").env("CARGO_LOG", log).run(); + } +} + +#[cargo_test] +fn deleting_checkout_files() { + let project = project(); + let git_project = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + project.cargo("build").run(); + + let dir = paths::home() + .join(".cargo/git/checkouts") + // get the first entry in the checkouts dir for the package's location + .read_dir() + .unwrap() + .next() + .unwrap() + .unwrap() + .path() + // get the first child of that checkout dir for our checkout + .read_dir() + .unwrap() + .next() + .unwrap() + .unwrap() + .path() + // and throw on .git to corrupt things + .join(".git"); + let mut files = Vec::new(); + find_files(&dir, &mut files); + assert!(!files.is_empty()); + + let log = "cargo::sources::git=trace"; + for file in files { + if !file.exists() { + continue; + } + println!("deleting {}", file.display()); + cargopaths::remove_file(&file).unwrap(); + project.cargo("build -v").env("CARGO_LOG", log).run(); + + if !file.exists() { + continue; + } + println!("truncating {}", file.display()); + make_writable(&file); + fs::OpenOptions::new() + .write(true) + .open(&file) + .unwrap() + .set_len(2) + .unwrap(); + project.cargo("build -v").env("CARGO_LOG", log).run(); + } +} + +fn make_writable(path: &Path) { + let mut p = path.metadata().unwrap().permissions(); + p.set_readonly(false); + fs::set_permissions(path, p).unwrap(); +} + +fn find_files(path: &Path, dst: &mut Vec) { + for e in path.read_dir().unwrap() { + let e = e.unwrap(); + let path = e.path(); + if e.file_type().unwrap().is_dir() { + find_files(&path, dst); + } else { + dst.push(path); + } + } +} diff --git a/tests/testsuite/cross_compile.rs b/tests/testsuite/cross_compile.rs new file mode 100644 index 00000000000..c725c1a01e2 --- 
/dev/null +++ b/tests/testsuite/cross_compile.rs @@ -0,0 +1,1233 @@ +use crate::support::{basic_bin_manifest, basic_manifest, cross_compile, project}; +use crate::support::{is_nightly, rustc_host}; + +#[cargo_test] +fn simple_cross() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, + cross_compile::alternate() + ), + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + p.cargo("build -v --target").arg(&target).run(); + assert!(p.target_bin(&target, "foo").is_file()); + + p.process(&p.target_bin(&target, "foo")).run(); +} + +#[cargo_test] +fn simple_cross_config() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + ".cargo/config", + &format!( + r#" + [build] + target = "{}" + "#, + cross_compile::alternate() + ), + ) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + assert_eq!(std::env::var("TARGET").unwrap(), "{}"); + }} + "#, + cross_compile::alternate() + ), + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + p.cargo("build -v").run(); + assert!(p.target_bin(&target, "foo").is_file()); + + p.process(&p.target_bin(&target, "foo")).run(); +} + +#[cargo_test] +fn simple_deps() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }") + .build(); + let _p2 = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let target = cross_compile::alternate(); + p.cargo("build --target").arg(&target).run(); + assert!(p.target_bin(&target, "foo").is_file()); + + p.process(&p.target_bin(&target, "foo")).run(); +} + +#[cargo_test] +fn plugin_deps() { + if cross_compile::disabled() { + return; + } + if !is_nightly() { + // plugins are unstable + return; + } + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate baz; + fn main() { + assert_eq!(bar!(), baz::baz()); + } + "#, + ) + .build(); + let _bar = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + + extern crate rustc_plugin; + extern crate syntax; + + use rustc_plugin::Registry; + use syntax::tokenstream::TokenTree; + use syntax::source_map::Span; + use syntax::ast::*; + use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; + use 
syntax::ext::build::AstBuilder;
+
+            #[plugin_registrar]
+            pub fn foo(reg: &mut Registry) {
+                reg.register_macro("bar", expand_bar);
+            }
+
+            fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
+                          -> Box<dyn MacResult + 'static> {
+                MacEager::expr(cx.expr_lit(sp, LitKind::Int(1, LitIntType::Unsuffixed)))
+            }
+        "#,
+        )
+        .build();
+    let _baz = project()
+        .at("baz")
+        .file("Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+        .build();
+
+    let target = cross_compile::alternate();
+    foo.cargo("build --target").arg(&target).run();
+    assert!(foo.target_bin(&target, "foo").is_file());
+
+    foo.process(&foo.target_bin(&target, "foo")).run();
+}
+
+#[cargo_test]
+fn plugin_to_the_max() {
+    if cross_compile::disabled() {
+        return;
+    }
+    if !is_nightly() {
+        // plugins are unstable
+        return;
+    }
+
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        )
+        .file(
+            "src/main.rs",
+            r#"
+            #![feature(plugin)]
+            #![plugin(bar)]
+            extern crate baz;
+            fn main() {
+                assert_eq!(bar!(), baz::baz());
+            }
+        "#,
+        )
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "bar"
+            plugin = true
+
+            [dependencies.baz]
+            path = "../baz"
+        "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            #![feature(plugin_registrar, rustc_private)]
+
+            extern crate rustc_plugin;
+            extern crate syntax;
+            extern crate baz;
+
+            use rustc_plugin::Registry;
+            use syntax::tokenstream::TokenTree;
+            use syntax::source_map::Span;
+            use syntax::ast::*;
+            use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
+            use syntax::ext::build::AstBuilder;
+            use syntax::ptr::P;
+
+            #[plugin_registrar]
+            pub fn foo(reg: &mut Registry) {
+                reg.register_macro("bar", expand_bar);
+            }
+
+            fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
+                          -> Box<dyn MacResult + 'static> {
+                let bar = Ident::from_str("baz");
+                let path = cx.path(sp, vec![bar.clone(), bar]);
+                MacEager::expr(cx.expr_call(sp, cx.expr_path(path), vec![]))
+            }
+        "#,
+        )
+        .build();
+    let _baz = project()
+        .at("baz")
+        .file("Cargo.toml", &basic_manifest("baz", "0.0.1"))
+        .file("src/lib.rs", "pub fn baz() -> i32 { 1 }")
+        .build();
+
+    let target = cross_compile::alternate();
+    foo.cargo("build -v --target").arg(&target).run();
+    println!("second");
+    foo.cargo("build -v --target").arg(&target).run();
+    assert!(foo.target_bin(&target, "foo").is_file());
+
+    foo.process(&foo.target_bin(&target, "foo")).run();
+}
+
+#[cargo_test]
+fn linker_and_ar() {
+    if cross_compile::disabled() {
+        return;
+    }
+
+    let target = cross_compile::alternate();
+    let p = project()
+        .file(
+            ".cargo/config",
+            &format!(
+                r#"
+                [target.{}]
+                ar = "my-ar-tool"
+                linker = "my-linker-tool"
+                "#,
+                target
+            ),
+        )
+        .file("Cargo.toml", &basic_bin_manifest("foo"))
+        .file(
+            "src/foo.rs",
+            &format!(
+                r#"
+                use std::env;
+                fn main() {{
+                    assert_eq!(env::consts::ARCH, "{}");
+                }}
+                "#,
+                cross_compile::alternate_arch()
+            ),
+        )
+        .build();
+
+    p.cargo("build -v --target")
+        .arg(&target)
+        .with_status(101)
+        .with_stderr_contains(&format!(
+            "\
+[COMPILING] foo v0.5.0 ([CWD])
+[RUNNING] `rustc --crate-name foo src/foo.rs --color never --crate-type bin \
+    --emit=[..]link -C debuginfo=2 \
+    -C metadata=[..]
\ + --out-dir [CWD]/target/{target}/debug/deps \ + --target {target} \ + -C ar=my-ar-tool -C linker=my-linker-tool \ + -L dependency=[CWD]/target/{target}/debug/deps \ + -L dependency=[CWD]/target/debug/deps` +", + target = target, + )) + .run(); +} + +#[cargo_test] +fn plugin_with_extra_dylib_dep() { + if cross_compile::disabled() { + return; + } + if !is_nightly() { + // plugins are unstable + return; + } + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + + fn main() {} + "#, + ) + .build(); + let _bar = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + + extern crate rustc_plugin; + extern crate baz; + + use rustc_plugin::Registry; + + #[plugin_registrar] + pub fn foo(reg: &mut Registry) { + println!("{}", baz::baz()); + } + "#, + ) + .build(); + let _baz = project() + .at("baz") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + + [lib] + name = "baz" + crate_type = ["dylib"] + "#, + ) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + let target = cross_compile::alternate(); + foo.cargo("build --target").arg(&target).run(); +} + +#[cargo_test] +fn cross_tests() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + authors = [] + version = "0.0.0" + + [[bin]] + name = "bar" + "#, + ) + .file( + "src/bin/bar.rs", + &format!( + r#" + #[allow(unused_extern_crates)] + extern crate foo; + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + #[test] fn test() {{ main() }} + "#, + cross_compile::alternate_arch() + ), + ) + .file( + "src/lib.rs", + &format!( + r#" + use std::env; + pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} + #[test] fn test_foo() {{ foo() }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + p.cargo("test --target") + .arg(&target) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/{triple}/debug/deps/foo-[..][EXE] +[RUNNING] target/{triple}/debug/deps/bar-[..][EXE]", + triple = target + )) + .with_stdout_contains("test test_foo ... ok") + .with_stdout_contains("test test ... ok") + .run(); +} + +#[cargo_test] +fn no_cross_doctests() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "src/lib.rs", + r#" + //! ``` + //! extern crate foo; + //! assert!(true); + //! ``` + "#, + ) + .build(); + + let host_output = "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[DOCTEST] foo +"; + + println!("a"); + p.cargo("test").with_stderr(&host_output).run(); + + println!("b"); + let target = cross_compile::host(); + p.cargo("test --target") + .arg(&target) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target/{triple}/debug/deps/foo-[..][EXE] +[DOCTEST] foo +", + triple = target + )) + .run(); + + println!("c"); + let target = cross_compile::alternate(); + p.cargo("test --target") + .arg(&target) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/{triple}/debug/deps/foo-[..][EXE] +", + triple = target + )) + .run(); +} + +#[cargo_test] +fn simple_cargo_run() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + p.cargo("run --target").arg(&target).run(); +} + +#[cargo_test] +fn cross_with_a_build_script() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#, + ) + .file( + "build.rs", + &format!( + r#" + use std::env; + use std::path::PathBuf; + fn main() {{ + assert_eq!(env::var("TARGET").unwrap(), "{0}"); + let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); + path.pop(); + assert!(path.file_name().unwrap().to_str().unwrap() + .starts_with("foo-")); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); + path.pop(); + assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); + }} + "#, + target + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v --target") + .arg(&target) + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ([CWD]) +[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..]` +[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build` +[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + target = target, + )) + .run(); +} + +#[cargo_test] +fn build_script_needed_for_host_and_target() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = 'build.rs' + + [dependencies.d1] + path = "d1" + [build-dependencies.d2] + path = "d2" + "#, + ) + .file( + "build.rs", + r#" + #[allow(unused_extern_crates)] + extern crate d2; + fn main() { d2::d2(); } + "#, + ) + .file( + "src/main.rs", + " + #[allow(unused_extern_crates)] + extern crate d1; + fn main() { d1::d1(); } + ", + ) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + build = 'build.rs' + "#, + ) + .file("d1/src/lib.rs", "pub fn d1() {}") + .file( + "d1/build.rs", + r#" + use std::env; + fn main() { + let target = env::var("TARGET").unwrap(); + println!("cargo:rustc-flags=-L /path/to/{}", target); + } + "#, + ) + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + + [dependencies.d1] + path = "../d1" + "#, + ) + .file( + "d2/src/lib.rs", + " + #[allow(unused_extern_crates)] + extern crate d1; + pub fn d2() { d1::d1(); } + ", + ) + .build(); + + p.cargo("build -v --target") + .arg(&target) + .with_stderr_contains(&"[COMPILING] d1 v0.0.0 ([CWD]/d1)") + .with_stderr_contains( + "[RUNNING] `rustc [..] d1/build.rs [..] --out-dir [CWD]/target/debug/build/d1-[..]`", + ) + .with_stderr_contains("[RUNNING] `[CWD]/target/debug/build/d1-[..]/build-script-build`") + .with_stderr_contains("[RUNNING] `rustc [..] d1/src/lib.rs [..]`") + .with_stderr_contains("[COMPILING] d2 v0.0.0 ([CWD]/d2)") + .with_stderr_contains(&format!( + "[RUNNING] `rustc [..] d2/src/lib.rs [..] -L /path/to/{host}`", + host = host + )) + .with_stderr_contains("[COMPILING] foo v0.0.0 ([CWD])") + .with_stderr_contains(&format!( + "[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..] \ + -L /path/to/{host}`", + host = host + )) + .with_stderr_contains(&format!( + "[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..] 
\ + -L /path/to/{target}`", + target = target + )) + .run(); +} + +#[cargo_test] +fn build_deps_for_the_right_arch() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.d2] + path = "d2" + "#, + ) + .file("src/main.rs", "extern crate d2; fn main() {}") + .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0")) + .file("d1/src/lib.rs", "pub fn d1() {}") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "../d1" + "#, + ) + .file("d2/build.rs", "extern crate d1; fn main() {}") + .file("d2/src/lib.rs", "") + .build(); + + let target = cross_compile::alternate(); + p.cargo("build -v --target").arg(&target).run(); +} + +#[cargo_test] +fn build_script_only_host() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "d1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("build.rs", "extern crate d1; fn main() {}") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file("d1/src/lib.rs", "pub fn d1() {}") + .file( + "d1/build.rs", + r#" + use std::env; + + fn main() { + assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") + .contains("target/debug/build/d1-"), + "bad: {:?}", env::var("OUT_DIR")); + } + "#, + ) + .build(); + + let target = cross_compile::alternate(); + p.cargo("build -v --target").arg(&target).run(); +} + +#[cargo_test] +fn plugin_build_script_right_arch() { + if cross_compile::disabled() { + return; + } + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [lib] + name = "foo" + plugin = true + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v --target") + .arg(cross_compile::alternate()) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] build.rs [..]` +[RUNNING] `[..]/build-script-build` +[RUNNING] `rustc [..] src/lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn build_script_with_platform_specific_dependencies() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [build-dependencies.d1] + path = "d1" + "#, + ) + .file( + "build.rs", + " + #[allow(unused_extern_crates)] + extern crate d1; + fn main() {} + ", + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + &format!( + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + + [target.{}.dependencies] + d2 = {{ path = "../d2" }} + "#, + host + ), + ) + .file( + "d1/src/lib.rs", + "#[allow(unused_extern_crates)] extern crate d2;", + ) + .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.0")) + .file("d2/src/lib.rs", "") + .build(); + + p.cargo("build -v --target") + .arg(&target) + .with_stderr(&format!( + "\ +[COMPILING] d2 v0.0.0 ([..]) +[RUNNING] `rustc [..] d2/src/lib.rs [..]` +[COMPILING] d1 v0.0.0 ([..]) +[RUNNING] `rustc [..] d1/src/lib.rs [..]` +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] 
build.rs [..]` +[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build` +[RUNNING] `rustc [..] src/lib.rs [..] --target {target} [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + target = target + )) + .run(); +} + +#[cargo_test] +fn platform_specific_dependencies_do_not_leak() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [dependencies.d1] + path = "d1" + + [build-dependencies.d1] + path = "d1" + "#, + ) + .file("build.rs", "extern crate d1; fn main() {}") + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + &format!( + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + + [target.{}.dependencies] + d2 = {{ path = "../d2" }} + "#, + host + ), + ) + .file("d1/src/lib.rs", "extern crate d2;") + .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0")) + .file("d2/src/lib.rs", "") + .build(); + + p.cargo("build -v --target") + .arg(&target) + .with_status(101) + .with_stderr_contains("[..] can't find crate for `d2`[..]") + .run(); +} + +#[cargo_test] +fn platform_specific_variables_reflected_in_build_scripts() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [target.{host}.dependencies] + d1 = {{ path = "d1" }} + + [target.{target}.dependencies] + d2 = {{ path = "d2" }} + "#, + host = host, + target = target + ), + ) + .file( + "build.rs", + &format!( + r#" + use std::env; + + fn main() {{ + let platform = env::var("TARGET").unwrap(); + let (expected, not_expected) = match &platform[..] 
{{ + "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), + "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), + _ => panic!("unknown platform") + }}; + + env::var(expected).ok() + .expect(&format!("missing {{}}", expected)); + env::var(not_expected).err() + .expect(&format!("found {{}}", not_expected)); + }} + "#, + host = host, + target = target + ), + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.0" + authors = [] + links = "d1" + build = "build.rs" + "#, + ) + .file("d1/build.rs", r#"fn main() { println!("cargo:val=1") }"#) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.0" + authors = [] + links = "d2" + build = "build.rs" + "#, + ) + .file("d2/build.rs", r#"fn main() { println!("cargo:val=1") }"#) + .file("d2/src/lib.rs", "") + .build(); + + p.cargo("build -v").run(); + p.cargo("build -v --target").arg(&target).run(); +} + +#[cargo_test] +fn cross_test_dylib() { + if cross_compile::disabled() { + return; + } + + let target = cross_compile::alternate(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar as the_bar; + + pub fn bar() { the_bar::baz(); } + + #[test] + fn foo() { bar(); } + "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo as the_foo; + + #[test] + fn foo() { the_foo::bar(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#, + ) + .file( + "bar/src/lib.rs", + &format!( + r#" + use std::env; + pub fn baz() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + p.cargo("test --target") + .arg(&target) + .with_stderr(&format!( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/{arch}/debug/deps/foo-[..][EXE] +[RUNNING] target/{arch}/debug/deps/test-[..][EXE]", + arch = cross_compile::alternate() + )) + .with_stdout_contains_n("test foo ... ok", 2) + .run(); +} diff --git a/tests/testsuite/cross_publish.rs b/tests/testsuite/cross_publish.rs new file mode 100644 index 00000000000..076a90361b4 --- /dev/null +++ b/tests/testsuite/cross_publish.rs @@ -0,0 +1,113 @@ +use std::fs::File; + +use crate::support::{cross_compile, project, publish, registry}; + +#[cargo_test] +fn simple_cross_package() { + if cross_compile::disabled() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#, + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + + p.cargo("package --target") + .arg(&target) + .with_stderr( + " Packaging foo v0.0.0 ([CWD]) + Verifying foo v0.0.0 ([CWD]) + Compiling foo v0.0.0 ([CWD]/target/package/foo-0.0.0) + Finished dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + + // Check that the tarball contains the files + let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap(); + publish::validate_crate_contents( + f, + "foo-0.0.0.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + &[], + ); +} + +#[cargo_test] +fn publish_with_target() { + if cross_compile::disabled() { + return; + } + + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#, + ) + .file( + "src/main.rs", + &format!( + r#" + use std::env; + fn main() {{ + assert_eq!(env::consts::ARCH, "{}"); + }} + "#, + cross_compile::alternate_arch() + ), + ) + .build(); + + let target = cross_compile::alternate(); + + p.cargo("publish --index") + .arg(registry::registry_url().to_string()) + .arg("--target") + .arg(&target) + .with_stderr(&format!( + " Updating `{registry}` index + Packaging foo v0.0.0 ([CWD]) + Verifying foo v0.0.0 ([CWD]) + Compiling foo v0.0.0 ([CWD]/target/package/foo-0.0.0) + Finished dev [unoptimized + debuginfo] target(s) in [..] + Uploading foo v0.0.0 ([CWD]) +", + registry = registry::registry_path().to_str().unwrap() + )) + .run(); +} diff --git a/tests/testsuite/custom_target.rs b/tests/testsuite/custom_target.rs new file mode 100644 index 00000000000..8c57e3da9ca --- /dev/null +++ b/tests/testsuite/custom_target.rs @@ -0,0 +1,132 @@ +use crate::support::is_nightly; +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn custom_target_minimal() { + if !is_nightly() { + // Requires features no_core, lang_items + return; + } + let p = project() + .file( + "src/lib.rs", + r#" + #![feature(no_core)] + #![feature(lang_items)] + #![no_core] + + pub fn foo() -> u32 { + 42 + } + + #[lang = "sized"] + pub trait Sized { + // Empty. + } + #[lang = "copy"] + pub trait Copy { + // Empty. + } + "#, + ) + .file( + "custom-target.json", + r#" + { + "llvm-target": "x86_64-unknown-none-gnu", + "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", + "arch": "x86_64", + "target-endian": "little", + "target-pointer-width": "64", + "target-c-int-width": "32", + "os": "none", + "linker-flavor": "ld.lld" + } + "#, + ) + .build(); + + p.cargo("build --lib --target custom-target.json -v").run(); + p.cargo("build --lib --target src/../custom-target.json -v") + .run(); +} + +#[cargo_test] +fn custom_target_dependency() { + if !is_nightly() { + // Requires features no_core, lang_items, optin_builtin_traits + return; + } + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.1" + authors = ["author@example.com"] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(no_core)] + #![feature(lang_items)] + #![feature(optin_builtin_traits)] + #![no_core] + + extern crate bar; + + pub fn foo() -> u32 { + bar::bar() + } + + #[lang = "freeze"] + unsafe auto trait Freeze {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file( + "bar/src/lib.rs", + r#" + #![feature(no_core)] + #![feature(lang_items)] + #![no_core] + + pub fn bar() -> u32 { + 42 + } + + #[lang = "sized"] + pub trait Sized { + // Empty. + } + #[lang = "copy"] + pub trait Copy { + // Empty. 
+            }
+        "#,
+        )
+        .file(
+            "custom-target.json",
+            r#"
+            {
+                "llvm-target": "x86_64-unknown-none-gnu",
+                "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
+                "arch": "x86_64",
+                "target-endian": "little",
+                "target-pointer-width": "64",
+                "target-c-int-width": "32",
+                "os": "none",
+                "linker-flavor": "ld.lld"
+            }
+            "#,
+        )
+        .build();
+
+    p.cargo("build --lib --target custom-target.json -v").run();
+}
diff --git a/tests/testsuite/death.rs b/tests/testsuite/death.rs
new file mode 100644
index 00000000000..1c1addb3311
--- /dev/null
+++ b/tests/testsuite/death.rs
@@ -0,0 +1,143 @@
+use std::fs;
+use std::io::{self, Read};
+use std::net::TcpListener;
+use std::process::{Child, Stdio};
+use std::thread;
+
+use crate::{support::project, support::slow_cpu_multiplier};
+
+#[cfg(unix)]
+fn enabled() -> bool {
+    true
+}
+
+// On Windows support for these tests is only enabled through the usage of job
+// objects. Support for nested job objects, however, was added in recent-ish
+// versions of Windows, so this test may not always be able to succeed.
+//
+// As a result, we try to add ourselves to a job object here to check whether
+// these tests can succeed or not.
+#[cfg(windows)]
+fn enabled() -> bool {
+    use winapi::um::{handleapi, jobapi, jobapi2, processthreadsapi};
+
+    unsafe {
+        // If we're not currently in a job, then we can definitely run these
+        // tests.
+        let me = processthreadsapi::GetCurrentProcess();
+        let mut ret = 0;
+        let r = jobapi::IsProcessInJob(me, 0 as *mut _, &mut ret);
+        assert_ne!(r, 0);
+        if ret == ::winapi::shared::minwindef::FALSE {
+            return true;
+        }
+
+        // If we are in a job, then we can run these tests if we can be added
+        // to a nested job (as we're going to create a nested job no matter
+        // what as part of these tests).
+        //
+        // If we can't be added to a nested job, then these tests will
+        // definitely fail, and there's not much we can do about that.
+        let job = jobapi2::CreateJobObjectW(0 as *mut _, 0 as *const _);
+        assert!(!job.is_null());
+        let r = jobapi2::AssignProcessToJobObject(job, me);
+        handleapi::CloseHandle(job);
+        r != 0
+    }
+}
+
+#[cargo_test]
+fn ctrl_c_kills_everyone() {
+    if !enabled() {
+        return;
+    }
+
+    let listener = TcpListener::bind("127.0.0.1:0").unwrap();
+    let addr = listener.local_addr().unwrap();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            &format!(
+                r#"
+                use std::net::TcpStream;
+                use std::io::Read;
+
+                fn main() {{
+                    let mut socket = TcpStream::connect("{}").unwrap();
+                    let _ = socket.read(&mut [0; 10]);
+                    panic!("that read should never return");
+                }}
+                "#,
+                addr
+            ),
+        )
+        .build();
+
+    let mut cargo = p.cargo("build").build_command();
+    cargo
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .stderr(Stdio::piped())
+        .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1");
+    let mut child = cargo.spawn().unwrap();
+
+    let mut sock = listener.accept().unwrap().0;
+    ctrl_c(&mut child);
+
+    assert!(!child.wait().unwrap().success());
+    match sock.read(&mut [0; 10]) {
+        Ok(n) => assert_eq!(n, 0),
+        Err(e) => assert_eq!(e.kind(), io::ErrorKind::ConnectionReset),
+    }
+
+    // Ok so what we just did was spawn cargo that spawned a build script, then
+    // we killed cargo in hopes of it killing the build script as well. If all
+    // went well the build script is now dead. On Windows, however, this is
+    // enforced with job objects, which means that it may actually be in the
+    // *process* of being torn down at this point.
+    //
+    // Now on Windows we can't completely remove a file until all handles to it
+    // have been closed, including those that represent running processes. So if
+    // we were to return here then there may still be an open reference to some
+    // file in the build directory. What we want to actually do is wait for the
+    // build script to *completely* exit. Take care of that by blowing away the
+    // build directory here, and panicking if we eventually spin too long
+    // without being able to.
+    for i in 0..10 {
+        match fs::remove_dir_all(&p.root().join("target")) {
+            Ok(()) => return,
+            Err(e) => println!("attempt {}: {}", i, e),
+        }
+        thread::sleep(slow_cpu_multiplier(100));
+    }
+
+    panic!(
+        "couldn't remove build directory after a few tries, seems like \
+         we won't be able to!"
+    );
+}
+
+#[cfg(unix)]
+fn ctrl_c(child: &mut Child) {
+    // Signal the whole process group (negative pid) so the build script
+    // spawned by cargo receives SIGINT as well.
+    let r = unsafe { libc::kill(-(child.id() as i32), libc::SIGINT) };
+    if r < 0 {
+        panic!("failed to kill: {}", io::Error::last_os_error());
+    }
+}
+
+#[cfg(windows)]
+fn ctrl_c(child: &mut Child) {
+    child.kill().unwrap();
+}
diff --git a/tests/testsuite/dep_info.rs b/tests/testsuite/dep_info.rs
new file mode 100644
index 00000000000..dce5c4025d1
--- /dev/null
+++ b/tests/testsuite/dep_info.rs
@@ -0,0 +1,516 @@
+use crate::support::paths::{self, CargoPathExt};
+use crate::support::registry::Package;
+use crate::support::{
+    basic_bin_manifest, basic_manifest, is_nightly, main_file, project, rustc_host, Project,
+};
+use filetime::FileTime;
+use std::fs;
+use std::path::Path;
+
+// Helper for testing dep-info files in the fingerprint dir.
+fn assert_deps(project: &Project, fingerprint: &str, test_cb: impl Fn(&Path, &[(u8, &str)])) {
+    let mut files = project
+        .glob(fingerprint)
+        .map(|f| f.expect("unwrap glob result"))
+        // Filter out `.json` entries.
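+        // (Inferred from the parsing below: the fingerprint dir is assumed to
+        // hold a JSON fingerprint next to one extension-less binary dep-info
+        // file, encoded as 0-byte-separated records whose first byte tags the
+        // path kind.)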
+ .filter(|f| f.extension().is_none()); + let info_path = files + .next() + .unwrap_or_else(|| panic!("expected 1 dep-info file at {}, found 0", fingerprint)); + assert!(files.next().is_none(), "expected only 1 dep-info file"); + let dep_info = fs::read(&info_path).unwrap(); + let deps: Vec<(u8, &str)> = dep_info + .split(|&x| x == 0) + .filter(|x| !x.is_empty()) + .map(|p| { + ( + p[0], + std::str::from_utf8(&p[1..]).expect("expected valid path"), + ) + }) + .collect(); + test_cb(&info_path, &deps); +} + +fn assert_deps_contains(project: &Project, fingerprint: &str, expected: &[(u8, &str)]) { + assert_deps(project, fingerprint, |info_path, entries| { + for (e_kind, e_path) in expected { + let pattern = glob::Pattern::new(e_path).unwrap(); + let count = entries + .iter() + .filter(|(kind, path)| kind == e_kind && pattern.matches(path)) + .count(); + if count != 1 { + panic!( + "Expected 1 match of {} {} in {:?}, got {}:\n{:#?}", + e_kind, e_path, info_path, count, entries + ); + } + } + }) +} + +#[cargo_test] +fn build_dep_info() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("build").run(); + + let depinfo_bin_path = &p.bin("foo").with_extension("d"); + + assert!(depinfo_bin_path.is_file()); + + let depinfo = p.read_file(depinfo_bin_path.to_str().unwrap()); + + let bin_path = p.bin("foo"); + let src_path = p.root().join("src").join("foo.rs"); + if !depinfo.lines().any(|line| { + line.starts_with(&format!("{}:", bin_path.display())) + && line.contains(src_path.to_str().unwrap()) + }) { + panic!( + "Could not find {:?}: {:?} in {:?}", + bin_path, src_path, depinfo_bin_path + ); + } +} + +#[cargo_test] +fn build_dep_info_lib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["lib"] + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + p.cargo("build --example=ex").run(); + assert!(p.example_lib("ex", "lib").with_extension("d").is_file()); +} + +#[cargo_test] +fn build_dep_info_rlib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["rlib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + p.cargo("build --example=ex").run(); + assert!(p.example_lib("ex", "rlib").with_extension("d").is_file()); +} + +#[cargo_test] +fn build_dep_info_dylib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[example]] + name = "ex" + crate-type = ["dylib"] + "#, + ) + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .build(); + + p.cargo("build --example=ex").run(); + assert!(p.example_lib("ex", "dylib").with_extension("d").is_file()); +} + +#[cargo_test] +fn no_rewrite_if_no_change() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("build").run(); + let dep_info = p.root().join("target/debug/libfoo.d"); + let metadata1 = dep_info.metadata().unwrap(); + p.cargo("build").run(); + let metadata2 = dep_info.metadata().unwrap(); + + assert_eq!( + FileTime::from_last_modification_time(&metadata1), + FileTime::from_last_modification_time(&metadata2), + ); +} + +#[cargo_test] +fn relative_depinfo_paths_ws() { + if !is_nightly() { + // See https://github.com/rust-lang/rust/issues/63012 + return; + } + + // 
Test relative dep-info paths in a workspace with --target with + // proc-macros and other dependency kinds. + Package::new("regdep", "0.1.0") + .file("src/lib.rs", "pub fn f() {}") + .publish(); + Package::new("pmdep", "0.1.0") + .file("src/lib.rs", "pub fn f() {}") + .publish(); + Package::new("bdep", "0.1.0") + .file("src/lib.rs", "pub fn f() {}") + .publish(); + + let p = project() + /*********** Workspace ***********/ + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + /*********** Main Project ***********/ + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + edition = "2018" + + [dependencies] + pm = {path = "../pm"} + bar = {path = "../bar"} + regdep = "0.1" + + [build-dependencies] + bdep = "0.1" + bar = {path = "../bar"} + "#, + ) + .file( + "foo/src/main.rs", + r#" + pm::noop!{} + + fn main() { + bar::f(); + regdep::f(); + } + "#, + ) + .file("foo/build.rs", "fn main() { bdep::f(); }") + /*********** Proc Macro ***********/ + .file( + "pm/Cargo.toml", + r#" + [package] + name = "pm" + version = "0.1.0" + edition = "2018" + + [lib] + proc-macro = true + + [dependencies] + pmdep = "0.1" + "#, + ) + .file( + "pm/src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro] + pub fn noop(_item: TokenStream) -> TokenStream { + pmdep::f(); + "".parse().unwrap() + } + "#, + ) + /*********** Path Dependency `bar` ***********/ + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn f() {}") + .build(); + + let host = rustc_host(); + p.cargo("build -Z binary-dep-depinfo --target") + .arg(&host) + .masquerade_as_nightly_cargo() + .with_stderr_contains("[COMPILING] foo [..]") + .run(); + + assert_deps_contains( + &p, + "target/debug/.fingerprint/pm-*/dep-lib-pm-*", + &[(1, "src/lib.rs"), (2, "debug/deps/libpmdep-*.rlib")], + ); + + assert_deps_contains( + &p, + &format!("target/{}/debug/.fingerprint/foo-*/dep-bin-foo-*", host), + &[ + (1, "src/main.rs"), + ( + 2, + &format!( + "debug/deps/{}pm-*.{}", + paths::get_lib_prefix("proc-macro"), + paths::get_lib_extension("proc-macro") + ), + ), + (2, &format!("{}/debug/deps/libbar-*.rlib", host)), + (2, &format!("{}/debug/deps/libregdep-*.rlib", host)), + ], + ); + + assert_deps_contains( + &p, + "target/debug/.fingerprint/foo-*/dep-build-script-build_script_build-*", + &[(1, "build.rs"), (2, "debug/deps/libbdep-*.rlib")], + ); + + // Make sure it stays fresh. + p.cargo("build -Z binary-dep-depinfo --target") + .arg(&host) + .masquerade_as_nightly_cargo() + .with_stderr("[FINISHED] dev [..]") + .run(); +} + +#[cargo_test] +fn relative_depinfo_paths_no_ws() { + if !is_nightly() { + // See https://github.com/rust-lang/rust/issues/63012 + return; + } + + // Test relative dep-info paths without a workspace with proc-macros and + // other dependency kinds. 
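+    // (Same shape as `relative_depinfo_paths_ws` above, just without the
+    // workspace layout or an explicit `--target`, so the fingerprints land
+    // directly under `target/debug`.)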
+ Package::new("regdep", "0.1.0") + .file("src/lib.rs", "pub fn f() {}") + .publish(); + Package::new("pmdep", "0.1.0") + .file("src/lib.rs", "pub fn f() {}") + .publish(); + Package::new("bdep", "0.1.0") + .file("src/lib.rs", "pub fn f() {}") + .publish(); + + let p = project() + /*********** Main Project ***********/ + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + edition = "2018" + + [dependencies] + pm = {path = "pm"} + bar = {path = "bar"} + regdep = "0.1" + + [build-dependencies] + bdep = "0.1" + bar = {path = "bar"} + "#, + ) + .file( + "src/main.rs", + r#" + pm::noop!{} + + fn main() { + bar::f(); + regdep::f(); + } + "#, + ) + .file("build.rs", "fn main() { bdep::f(); }") + /*********** Proc Macro ***********/ + .file( + "pm/Cargo.toml", + r#" + [package] + name = "pm" + version = "0.1.0" + edition = "2018" + + [lib] + proc-macro = true + + [dependencies] + pmdep = "0.1" + "#, + ) + .file( + "pm/src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro] + pub fn noop(_item: TokenStream) -> TokenStream { + pmdep::f(); + "".parse().unwrap() + } + "#, + ) + /*********** Path Dependency `bar` ***********/ + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn f() {}") + .build(); + + p.cargo("build -Z binary-dep-depinfo") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[COMPILING] foo [..]") + .run(); + + assert_deps_contains( + &p, + "target/debug/.fingerprint/pm-*/dep-lib-pm-*", + &[(1, "src/lib.rs"), (2, "debug/deps/libpmdep-*.rlib")], + ); + + assert_deps_contains( + &p, + "target/debug/.fingerprint/foo-*/dep-bin-foo-*", + &[ + (1, "src/main.rs"), + ( + 2, + &format!( + "debug/deps/{}pm-*.{}", + paths::get_lib_prefix("proc-macro"), + paths::get_lib_extension("proc-macro") + ), + ), + (2, "debug/deps/libbar-*.rlib"), + (2, "debug/deps/libregdep-*.rlib"), + ], + ); + + assert_deps_contains( + &p, + "target/debug/.fingerprint/foo-*/dep-build-script-build_script_build-*", + &[(1, "build.rs"), (2, "debug/deps/libbdep-*.rlib")], + ); + + // Make sure it stays fresh. + p.cargo("build -Z binary-dep-depinfo") + .masquerade_as_nightly_cargo() + .with_stderr("[FINISHED] dev [..]") + .run(); +} + +#[cargo_test] +fn reg_dep_source_not_tracked() { + // Make sure source files in dep-info file are not tracked for registry dependencies. 
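+    // (Judging from the assertions in this file, kind-1 entries are
+    // package-root-relative source paths and kind-2 entries are
+    // target-dir-relative outputs; a registry dependency's dep-info should
+    // therefore carry no kind-1 entries.)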
+    Package::new("regdep", "0.1.0")
+        .file("src/lib.rs", "pub fn f() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            regdep = "0.1"
+            "#,
+        )
+        .file("src/lib.rs", "pub fn f() { regdep::f(); }")
+        .build();
+
+    p.cargo("build").run();
+
+    assert_deps(
+        &p,
+        "target/debug/.fingerprint/regdep-*/dep-lib-regdep-*",
+        |info_path, entries| {
+            for (kind, path) in entries {
+                if *kind == 1 {
+                    panic!(
+                        "Did not expect package root relative path type: {:?} in {:?}",
+                        path, info_path
+                    );
+                }
+            }
+        },
+    );
+}
+
+#[cargo_test]
+fn canonical_path() {
+    if !is_nightly() {
+        // See https://github.com/rust-lang/rust/issues/63012
+        return;
+    }
+    if !crate::support::symlink_supported() {
+        return;
+    }
+    Package::new("regdep", "0.1.0")
+        .file("src/lib.rs", "pub fn f() {}")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            regdep = "0.1"
+            "#,
+        )
+        .file("src/lib.rs", "pub fn f() { regdep::f(); }")
+        .build();
+
+    let real = p.root().join("real_target");
+    real.mkdir_p();
+    p.symlink(real, "target");
+
+    p.cargo("build -Z binary-dep-depinfo")
+        .masquerade_as_nightly_cargo()
+        .run();
+
+    assert_deps_contains(
+        &p,
+        "target/debug/.fingerprint/foo-*/dep-lib-foo-*",
+        &[(1, "src/lib.rs"), (2, "debug/deps/libregdep-*.rlib")],
+    );
+}
diff --git a/tests/testsuite/directory.rs b/tests/testsuite/directory.rs
new file mode 100644
index 00000000000..b500b606556
--- /dev/null
+++ b/tests/testsuite/directory.rs
@@ -0,0 +1,769 @@
+use std::collections::HashMap;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::str;
+
+use serde::Serialize;
+
+use crate::support::cargo_process;
+use crate::support::git;
+use crate::support::paths;
+use crate::support::registry::{cksum, Package};
+use crate::support::{basic_manifest, project, ProjectBuilder};
+
+fn setup() {
+    let root = paths::root();
+    t!(fs::create_dir(&root.join(".cargo")));
+    t!(t!(File::create(root.join(".cargo/config"))).write_all(
+        br#"
+        [source.crates-io]
+        replace-with = 'my-awesome-local-registry'
+
+        [source.my-awesome-local-registry]
+        directory = 'index'
+        "#
+    ));
+}
+
+struct VendorPackage {
+    p: Option<ProjectBuilder>,
+    cksum: Checksum,
+}
+
+#[derive(Serialize)]
+struct Checksum {
+    package: Option<String>,
+    files: HashMap<String, String>,
+}
+
+impl VendorPackage {
+    fn new(name: &str) -> VendorPackage {
+        VendorPackage {
+            p: Some(project().at(&format!("index/{}", name))),
+            cksum: Checksum {
+                package: Some(String::new()),
+                files: HashMap::new(),
+            },
+        }
+    }
+
+    fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage {
+        self.p = Some(self.p.take().unwrap().file(name, contents));
+        self.cksum
+            .files
+            .insert(name.to_string(), cksum(contents.as_bytes()));
+        self
+    }
+
+    fn disable_checksum(&mut self) -> &mut VendorPackage {
+        self.cksum.package = None;
+        self
+    }
+
+    fn no_manifest(mut self) -> Self {
+        self.p = self.p.map(|pb| pb.no_manifest());
+        self
+    }
+
+    fn build(&mut self) {
+        let p = self.p.take().unwrap();
+        let json = serde_json::to_string(&self.cksum).unwrap();
+        let p = p.file(".cargo-checksum.json", &json);
+        let _ = p.build();
+    }
+}
+
+#[cargo_test]
+fn simple() {
+    setup();
+
+    VendorPackage::new("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn bar() {}")
+        .build();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
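+            # `bar` is expected to resolve through the directory source that
+            # `setup()` configures above (crates-io replaced by the local
+            # `index` folder).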
+ "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn simple_install() { + setup(); + + VendorPackage::new("foo") + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.0.1" + "#, + ) + .file( + "src/main.rs", + "extern crate foo; pub fn main() { foo::foo(); }", + ) + .build(); + + cargo_process("install bar") + .with_stderr( + "\ +[INSTALLING] bar v0.1.0 +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[FINISHED] release [optimized] target(s) in [..]s +[INSTALLING] [..]bar[..] +[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); +} + +#[cargo_test] +fn simple_install_fail() { + setup(); + + VendorPackage::new("foo") + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + baz = "9.8.7" + "#, + ) + .file( + "src/main.rs", + "extern crate foo; pub fn main() { foo::foo(); }", + ) + .build(); + + cargo_process("install bar") + .with_status(101) + .with_stderr( + " Installing bar v0.1.0 +error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]` + +Caused by: + no matching package named `baz` found +location searched: registry `https://github.com/rust-lang/crates.io-index` +perhaps you meant: bar or foo +required by package `bar v0.1.0` +", + ) + .run(); +} + +#[cargo_test] +fn install_without_feature_dep() { + setup(); + + VendorPackage::new("foo") + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.0.1" + baz = { version = "9.8.7", optional = true } + + [features] + wantbaz = ["baz"] + "#, + ) + .file( + "src/main.rs", + "extern crate foo; pub fn main() { foo::foo(); }", + ) + .build(); + + cargo_process("install bar") + .with_stderr( + "\ +[INSTALLING] bar v0.1.0 +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[FINISHED] release [optimized] target(s) in [..]s +[INSTALLING] [..]bar[..] +[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); +} + +#[cargo_test] +fn not_there() { + setup(); + + let _ = project().at("index").build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: no matching package named `bar` found +location searched: [..] 
+required by package `foo v0.1.0 ([..])` +", + ) + .run(); +} + +#[cargo_test] +fn multiple() { + setup(); + + VendorPackage::new("bar-0.1.0") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .file(".cargo-checksum", "") + .build(); + + VendorPackage::new("bar-0.2.0") + .file("Cargo.toml", &basic_manifest("bar", "0.2.0")) + .file("src/lib.rs", "pub fn bar() {}") + .file(".cargo-checksum", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn crates_io_then_directory() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + let cksum = Package::new("bar", "0.1.0") + .file("src/lib.rs", "pub fn bar() -> u32 { 0 }") + .publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] [..] +", + ) + .run(); + + setup(); + + let mut v = VendorPackage::new("bar"); + v.file("Cargo.toml", &basic_manifest("bar", "0.1.0")); + v.file("src/lib.rs", "pub fn bar() -> u32 { 1 }"); + v.cksum.package = Some(cksum); + v.build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn crates_io_then_bad_checksum() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + Package::new("bar", "0.1.0").publish(); + + p.cargo("build").run(); + setup(); + + VendorPackage::new("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: checksum for `bar v0.1.0` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g., a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `bar v0.1.0` is the same as when the lockfile was generated + +", + ) + .run(); +} + +#[cargo_test] +fn bad_file_checksum() { + setup(); + + VendorPackage::new("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "") + .build(); + + let mut f = t!(File::create(paths::root().join("index/bar/src/lib.rs"))); + t!(f.write_all(b"fn bar() -> u32 { 0 }")); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: the listed checksum of `[..]lib.rs` has changed: +expected: [..] +actual: [..] 
+ +directory sources are not intended to be edited, if modifications are \ +required then it is recommended that [replace] is used with a forked copy of \ +the source +", + ) + .run(); +} + +#[cargo_test] +fn only_dot_files_ok() { + setup(); + + VendorPackage::new("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "") + .build(); + VendorPackage::new("foo") + .no_manifest() + .file(".bar", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn random_files_ok() { + setup(); + + VendorPackage::new("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "") + .build(); + VendorPackage::new("foo") + .no_manifest() + .file("bar", "") + .file("../test", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn git_lock_file_doesnt_change() { + let git = git::new("git", |p| { + p.file("Cargo.toml", &basic_manifest("git", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + VendorPackage::new("git") + .file("Cargo.toml", &basic_manifest("git", "0.5.0")) + .file("src/lib.rs", "") + .disable_checksum() + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git = {{ git = '{0}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + let mut lock1 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock1)); + + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all( + format!( + r#" + [source.my-git-repo] + git = '{}' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'index' + "#, + git.url() + ) + .as_bytes() + )); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] +", + ) + .run(); + + let mut lock2 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lock2)); + assert_eq!(lock1, lock2, "lock files changed"); +} + +#[cargo_test] +fn git_override_requires_lockfile() { + VendorPackage::new("git") + .file("Cargo.toml", &basic_manifest("git", "0.5.0")) + .file("src/lib.rs", "") + .disable_checksum() + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git = { git = 'https://example.com/' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all( + br#" + [source.my-git-repo] + git = 'https://example.com/' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'index' + "# + )); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to load source for a dependency on `git` + +Caused by: + Unable to update [..] 
+ +Caused by: + the source my-git-repo requires a lock file to be present first before it can be +used against vendored source code + +remove the source replacement configuration, generate a lock file, and then +restore the source replacement configuration to continue the build + +", + ) + .run(); +} + +#[cargo_test] +fn workspace_different_locations() { + let p = project() + .no_manifest() + .file( + "foo/Cargo.toml", + r#" + [package] + name = 'foo' + version = '0.1.0' + + [dependencies] + baz = "*" + "#, + ) + .file("foo/src/lib.rs", "") + .file("foo/vendor/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("foo/vendor/baz/src/lib.rs", "") + .file("foo/vendor/baz/.cargo-checksum.json", "{\"files\":{}}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = 'bar' + version = '0.1.0' + + [dependencies] + baz = "*" + "#, + ) + .file("bar/src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + target-dir = './target' + + [source.crates-io] + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'foo/vendor' + "#, + ) + .build(); + + p.cargo("build").cwd("foo").run(); + p.cargo("build") + .cwd("bar") + .with_stderr( + "\ +[COMPILING] bar [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn version_missing() { + setup(); + + VendorPackage::new("foo") + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + VendorPackage::new("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "2" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + cargo_process("install bar") + .with_stderr( + "\ +[INSTALLING] bar v0.1.0 +error: failed to compile [..] + +Caused by: + failed to select a version for the requirement `foo = \"^2\"` + candidate versions found which didn't match: 0.0.1 + location searched: directory source `[..] (which is replacing registry `[..]`) +required by package `bar v0.1.0` +perhaps a crate was updated and forgotten to be re-vendored? +", + ) + .with_status(101) + .run(); +} diff --git a/tests/testsuite/doc.rs b/tests/testsuite/doc.rs new file mode 100644 index 00000000000..40f9423913a --- /dev/null +++ b/tests/testsuite/doc.rs @@ -0,0 +1,1404 @@ +use std::fs::{self, File}; +use std::io::Read; +use std::str; + +use crate::support::paths::CargoPathExt; +use crate::support::registry::Package; +use crate::support::{basic_lib_manifest, basic_manifest, git, project}; +use crate::support::{is_nightly, rustc_host}; + +#[cargo_test] +fn simple() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + p.cargo("doc") + .with_stderr( + "\ +[..] foo v0.0.1 ([CWD]) +[..] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/foo/index.html").is_file()); +} + +#[cargo_test] +fn doc_no_libs() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "foo" + doc = false + "#, + ) + .file("src/main.rs", "bad code") + .build(); + + p.cargo("doc").run(); +} + +#[cargo_test] +fn doc_twice() { + let p = project().file("src/lib.rs", "pub fn foo() {}").build(); + + p.cargo("doc") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("doc").with_stdout("").run(); +} + +#[cargo_test] +fn doc_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "extern crate bar; pub fn foo() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("doc") + .with_stderr( + "\ +[..] bar v0.0.1 ([CWD]/bar) +[..] bar v0.0.1 ([CWD]/bar) +[DOCUMENTING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/foo/index.html").is_file()); + assert!(p.root().join("target/doc/bar/index.html").is_file()); + + // Verify that it only emits rmeta for the dependency. + assert_eq!(p.glob("target/debug/**/*.rlib").count(), 0); + assert_eq!(p.glob("target/debug/deps/libbar-*.rmeta").count(), 1); + + p.cargo("doc") + .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint") + .with_stdout("") + .run(); + + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/foo/index.html").is_file()); + assert!(p.root().join("target/doc/bar/index.html").is_file()); +} + +#[cargo_test] +fn doc_no_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "extern crate bar; pub fn foo() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("doc --no-deps") + .with_stderr( + "\ +[CHECKING] bar v0.0.1 ([CWD]/bar) +[DOCUMENTING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/foo/index.html").is_file()); + assert!(!p.root().join("target/doc/bar/index.html").is_file()); +} + +#[cargo_test] +fn doc_only_bin() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "extern crate bar; pub fn foo() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("doc -v").run(); + + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/bar/index.html").is_file()); + assert!(p.root().join("target/doc/foo/index.html").is_file()); +} + +#[cargo_test] +fn doc_multiple_targets_same_name_lib() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + name = "foo_lib" + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [lib] + name = "foo_lib" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("doc --all") + .with_status(101) + .with_stderr_contains("[..] library `foo_lib` is specified [..]") + .with_stderr_contains("[..] `foo v0.1.0[..]` [..]") + .with_stderr_contains("[..] `bar v0.1.0[..]` [..]") + .run(); +} + +#[cargo_test] +fn doc_multiple_targets_same_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo_lib" + path = "src/foo_lib.rs" + "#, + ) + .file("foo/src/foo_lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [lib] + name = "foo_lib" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("doc --all") + .with_stderr_contains("[DOCUMENTING] foo v0.1.0 ([CWD]/foo)") + .with_stderr_contains("[DOCUMENTING] bar v0.1.0 ([CWD]/bar)") + .with_stderr_contains("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + assert!(p.root().join("target/doc").is_dir()); + let doc_file = p.root().join("target/doc/foo_lib/index.html"); + assert!(doc_file.is_file()); +} + +#[cargo_test] +fn doc_multiple_targets_same_name_bin() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#, + ) + .file("foo/src/foo-cli.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#, + ) + .file("bar/src/foo-cli.rs", "") + .build(); + + p.cargo("doc --all") + .with_status(101) + .with_stderr_contains("[..] binary `foo_cli` is specified [..]") + .with_stderr_contains("[..] `foo v0.1.0[..]` [..]") + .with_stderr_contains("[..] 
`bar v0.1.0[..]` [..]") + .run(); +} + +#[cargo_test] +fn doc_multiple_targets_same_name_undoced() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foo-cli" + "#, + ) + .file("foo/src/foo-cli.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + [[bin]] + name = "foo-cli" + doc = false + "#, + ) + .file("bar/src/foo-cli.rs", "") + .build(); + + p.cargo("doc --all").run(); +} + +#[cargo_test] +fn doc_lib_bin_same_name_documents_lib() { + let p = project() + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + p.cargo("doc") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + assert!(p.root().join("target/doc").is_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert!(doc_file.is_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(doc_html.contains("Library")); + assert!(!doc_html.contains("Binary")); +} + +#[cargo_test] +fn doc_lib_bin_same_name_documents_lib_when_requested() { + let p = project() + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + p.cargo("doc --lib") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + assert!(p.root().join("target/doc").is_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert!(doc_file.is_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(doc_html.contains("Library")); + assert!(!doc_html.contains("Binary")); +} + +#[cargo_test] +fn doc_lib_bin_same_name_documents_named_bin_when_requested() { + let p = project() + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + p.cargo("doc --bin foo") + .with_stderr( + "\ +[CHECKING] foo v0.0.1 ([CWD]) +[DOCUMENTING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + assert!(p.root().join("target/doc").is_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert!(doc_file.is_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(!doc_html.contains("Library")); + assert!(doc_html.contains("Binary")); +} + +#[cargo_test] +fn doc_lib_bin_same_name_documents_bins_when_requested() { + let p = project() + .file( + "src/main.rs", + r#" + //! Binary documentation + extern crate foo; + fn main() { + foo::foo(); + } + "#, + ) + .file( + "src/lib.rs", + r#" + //! Library documentation + pub fn foo() {} + "#, + ) + .build(); + + p.cargo("doc --bins") + .with_stderr( + "\ +[CHECKING] foo v0.0.1 ([CWD]) +[DOCUMENTING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + assert!(p.root().join("target/doc").is_dir()); + let doc_file = p.root().join("target/doc/foo/index.html"); + assert!(doc_file.is_file()); + let mut doc_html = String::new(); + File::open(&doc_file) + .unwrap() + .read_to_string(&mut doc_html) + .unwrap(); + assert!(!doc_html.contains("Library")); + assert!(doc_html.contains("Binary")); +} + +#[cargo_test] +fn doc_dash_p() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "extern crate a;") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("doc -p a") + .with_stderr( + "\ +[..] b v0.0.1 ([CWD]/b) +[..] b v0.0.1 ([CWD]/b) +[DOCUMENTING] a v0.0.1 ([CWD]/a) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn doc_same_name() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/main.rs", "fn main() {}") + .file("examples/main.rs", "fn main() {}") + .file("tests/main.rs", "fn main() {}") + .build(); + + p.cargo("doc").run(); +} + +#[cargo_test] +fn doc_target() { + const TARGET: &str = "arm-unknown-linux-gnueabihf"; + + let p = project() + .file( + "src/lib.rs", + r#" + #![feature(no_core, lang_items)] + #![no_core] + + #[lang = "sized"] + trait Sized {} + + extern { + pub static A: u32; + } + "#, + ) + .build(); + + p.cargo("doc --verbose --target").arg(TARGET).run(); + assert!(p.root().join(&format!("target/{}/doc", TARGET)).is_dir()); + assert!(p + .root() + .join(&format!("target/{}/doc/foo/index.html", TARGET)) + .is_file()); +} + +#[cargo_test] +fn target_specific_not_documented() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "not rust") + .build(); + + p.cargo("doc").run(); +} + +#[cargo_test] +fn output_not_captured() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file( + "a/src/lib.rs", + " + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + ", + ) + .build(); + + p.cargo("doc") + .without_status() + .with_stderr_contains("[..]☃") + .with_stderr_contains(r"[..]unknown start of token: \u{2603}") + .run(); +} + +#[cargo_test] +fn target_specific_documented() { + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + a = {{ path = "a" }} + [target.{}.dependencies] + a = {{ path = "a" }} + "#, + rustc_host() + ), + ) + .file( + "src/lib.rs", + " + extern crate a; + + /// test + pub fn foo() {} + ", + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file( + "a/src/lib.rs", + " + /// test + pub fn foo() {} + ", + ) + .build(); + + p.cargo("doc").run(); +} + +#[cargo_test] +fn no_document_build_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [build-dependencies] + a = { path = "a" } + "#, 
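+            // `a`'s docs contain the same broken doc-test snippet used in
+            // `output_not_captured` above, so this test can only pass because
+            // `cargo doc` does not document build-dependencies.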
+ ) + .file("src/lib.rs", "pub fn foo() {}") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file( + "a/src/lib.rs", + " + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + ", + ) + .build(); + + p.cargo("doc").run(); +} + +#[cargo_test] +fn doc_release() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("build --release").run(); + p.cargo("doc --release -v") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([..]) +[RUNNING] `rustdoc [..] src/lib.rs [..]` +[FINISHED] release [optimized] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn doc_multiple_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + + [dependencies.baz] + path = "baz" + "#, + ) + .file("src/lib.rs", "extern crate bar; pub fn foo() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("doc -p bar -p baz -v").run(); + + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/bar/index.html").is_file()); + assert!(p.root().join("target/doc/baz/index.html").is_file()); +} + +#[cargo_test] +fn features() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + + [features] + foo = ["bar/bar"] + "#, + ) + .file("src/lib.rs", r#"#[cfg(feature = "foo")] pub fn foo() {}"#) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + bar = [] + "#, + ) + .file( + "bar/build.rs", + r#" + fn main() { + println!("cargo:rustc-cfg=bar"); + } + "#, + ) + .file( + "bar/src/lib.rs", + r#"#[cfg(feature = "bar")] pub fn bar() {}"#, + ) + .build(); + p.cargo("doc --features foo").run(); + assert!(p.root().join("target/doc").is_dir()); + assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); + assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); +} + +#[cargo_test] +fn rerun_when_dir_removed() { + let p = project() + .file( + "src/lib.rs", + r#" + /// dox + pub fn foo() {} + "#, + ) + .build(); + + p.cargo("doc").run(); + assert!(p.root().join("target/doc/foo/index.html").is_file()); + + fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap(); + + p.cargo("doc").run(); + assert!(p.root().join("target/doc/foo/index.html").is_file()); +} + +#[cargo_test] +fn document_only_lib() { + let p = project() + .file( + "src/lib.rs", + r#" + /// dox + pub fn foo() {} + "#, + ) + .file( + "src/bin/bar.rs", + r#" + /// ``` + /// ☃ + /// ``` + pub fn foo() {} + fn main() { foo(); } + "#, + ) + .build(); + p.cargo("doc --lib").run(); + assert!(p.root().join("target/doc/foo/index.html").is_file()); +} + +#[cargo_test] +fn plugins_no_use_target() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("doc --target=x86_64-unknown-openbsd -v").run(); +} + +#[cargo_test] +fn doc_all_workspace() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + 
.build(); + + // The order in which bar is compiled or documented is not deterministic + p.cargo("doc --all") + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Checking bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") + .run(); +} + +#[cargo_test] +fn doc_all_virtual_manifest() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + // The order in which bar and baz are documented is not guaranteed + p.cargo("doc --all") + .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .run(); +} + +#[cargo_test] +fn doc_virtual_manifest_all_implied() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + // The order in which bar and baz are documented is not guaranteed + p.cargo("doc") + .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .run(); +} + +#[cargo_test] +fn doc_all_member_dependency_same_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + + p.cargo("doc --all") + .with_stderr_contains("[..] Updating `[..]` index") + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .run(); +} + +#[cargo_test] +#[cfg(not(any(target_os = "windows", target_os = "macos")))] +fn doc_workspace_open_help_message() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo", "bar"] + "#, + ) + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .build(); + + // The order in which bar is compiled or documented is not deterministic + p.cargo("doc --all --open") + .env("BROWSER", "echo") + .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") + .with_stderr_contains("[..] Opening [..]/foo/index.html") + .run(); +} + +#[cargo_test] +#[cfg(not(any(target_os = "windows", target_os = "macos")))] +fn doc_workspace_open_different_library_and_package_names() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + name = "foolib" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("doc --open") + .env("BROWSER", "echo") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") + .with_stderr_contains("[..] 
[CWD]/target/doc/foolib/index.html") + .run(); +} + +#[cargo_test] +#[cfg(not(any(target_os = "windows", target_os = "macos")))] +fn doc_workspace_open_binary() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [[bin]] + name = "foobin" + path = "src/main.rs" + "#, + ) + .file("foo/src/main.rs", "") + .build(); + + p.cargo("doc --open") + .env("BROWSER", "echo") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") + .with_stderr_contains("[..] Opening [CWD]/target/doc/foobin/index.html") + .run(); +} + +#[cargo_test] +#[cfg(not(any(target_os = "windows", target_os = "macos")))] +fn doc_workspace_open_binary_and_library() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + name = "foolib" + [[bin]] + name = "foobin" + path = "src/main.rs" + "#, + ) + .file("foo/src/lib.rs", "") + .file("foo/src/main.rs", "") + .build(); + + p.cargo("doc --open") + .env("BROWSER", "echo") + .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") + .with_stderr_contains("[..] Opening [CWD]/target/doc/foolib/index.html") + .run(); +} + +#[cargo_test] +fn doc_edition() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + edition = "2018" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("doc -v") + .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") + .run(); + + p.cargo("test -v") + .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") + .run(); +} + +#[cargo_test] +fn doc_target_edition() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + edition = "2018" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("doc -v") + .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") + .run(); + + p.cargo("test -v") + .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") + .run(); +} + +// Tests an issue where depending on different versions of the same crate depending on `cfg`s +// caused `cargo doc` to fail. 
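+// The manifest below reproduces it: `bar 0.1` is pulled in only on 32-bit
+// Windows and `bar 0.2` everywhere else, so both versions can end up in the
+// lock file even though only one of them is compiled on any given host.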
+#[cargo_test] +fn issue_5345() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.'cfg(all(windows, target_arch = "x86"))'.dependencies] + bar = "0.1" + + [target.'cfg(not(all(windows, target_arch = "x86")))'.dependencies] + bar = "0.2" + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .build(); + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.2.0").publish(); + + foo.cargo("build").run(); + foo.cargo("doc").run(); +} + +#[cargo_test] +fn doc_private_items() { + let foo = project() + .file("src/lib.rs", "mod private { fn private_item() {} }") + .build(); + foo.cargo("doc --document-private-items").run(); + + assert!(foo.root().join("target/doc").is_dir()); + assert!(foo + .root() + .join("target/doc/foo/private/index.html") + .is_file()); +} + +#[cargo_test] +fn doc_private_ws() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "fn p() {}") + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "fn p2() {}") + .file("b/src/main.rs", "fn main() {}") + .build(); + p.cargo("doc --all --bins --lib --document-private-items -v") + .with_stderr_contains( + "[RUNNING] `rustdoc [..] a/src/lib.rs [..]--document-private-items[..]", + ) + .with_stderr_contains( + "[RUNNING] `rustdoc [..] b/src/lib.rs [..]--document-private-items[..]", + ) + .with_stderr_contains( + "[RUNNING] `rustdoc [..] b/src/main.rs [..]--document-private-items[..]", + ) + .run(); +} + +const BAD_INTRA_LINK_LIB: &str = r#" +#![deny(intra_doc_link_resolution_failure)] + +/// [bad_link] +pub fn foo() {} +"#; + +#[cargo_test] +fn doc_cap_lints() { + if !is_nightly() { + // This can be removed once intra_doc_link_resolution_failure fails on stable. + return; + } + let a = git::new("a", |p| { + p.file("Cargo.toml", &basic_lib_manifest("a")) + .file("src/lib.rs", BAD_INTRA_LINK_LIB) + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("doc") + .with_stderr_unordered( + "\ +[UPDATING] git repository `[..]` +[DOCUMENTING] a v0.5.0 ([..]) +[CHECKING] a v0.5.0 ([..]) +[DOCUMENTING] foo v0.0.1 ([..]) +[FINISHED] dev [..] +", + ) + .run(); + + p.root().join("target").rm_rf(); + + p.cargo("doc -vv") + .with_stderr_contains( + "\ +[WARNING] `[bad_link]` cannot be resolved, ignoring it... +", + ) + .run(); +} + +#[cargo_test] +fn doc_message_format() { + if !is_nightly() { + // This can be removed once intra_doc_link_resolution_failure fails on stable. + return; + } + let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build(); + + p.cargo("doc --message-format=json") + .with_status(101) + .with_json_contains_unordered( + r#" + { + "message": { + "children": "{...}", + "code": "{...}", + "level": "error", + "message": "[..]", + "rendered": "[..]", + "spans": "{...}" + }, + "package_id": "foo [..]", + "reason": "compiler-message", + "target": "{...}" + } + "#, + ) + .run(); +} + +#[cargo_test] +fn short_message_format() { + if !is_nightly() { + // This can be removed once intra_doc_link_resolution_failure fails on stable. 
+ return; + } + let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build(); + p.cargo("doc --message-format=short") + .with_status(101) + .with_stderr_contains( + "src/lib.rs:4:6: error: `[bad_link]` cannot be resolved, ignoring it...", + ) + .run(); +} + +#[cargo_test] +fn doc_example() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + edition = "2018" + + [[example]] + crate-type = ["lib"] + name = "ex1" + doc = true + "#, + ) + .file("src/lib.rs", "pub fn f() {}") + .file( + "examples/ex1.rs", + r#" + use foo::f; + + /// Example + pub fn x() { f(); } + "#, + ) + .build(); + + p.cargo("doc").run(); + assert!(p + .build_dir() + .join("doc") + .join("ex1") + .join("fn.x.html") + .exists()); +} diff --git a/tests/testsuite/edition.rs b/tests/testsuite/edition.rs new file mode 100644 index 00000000000..5d3f04d9f5a --- /dev/null +++ b/tests/testsuite/edition.rs @@ -0,0 +1,32 @@ +use crate::support::{basic_lib_manifest, project}; + +#[cargo_test] +fn edition_works_for_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = 'foo' + version = '0.1.0' + edition = '2018' + + [build-dependencies] + a = { path = 'a' } + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { + a::foo(); + } + "#, + ) + .file("a/Cargo.toml", &basic_lib_manifest("a")) + .file("a/src/lib.rs", "pub fn foo() {}") + .build(); + + p.cargo("build -v").run(); +} diff --git a/tests/testsuite/features.rs b/tests/testsuite/features.rs new file mode 100644 index 00000000000..1fe51e19ecb --- /dev/null +++ b/tests/testsuite/features.rs @@ -0,0 +1,1958 @@ +use std::fs::File; +use std::io::prelude::*; + +use crate::support::paths::CargoPathExt; +use crate::support::registry::Package; +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn invalid1() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + bar = ["baz"] + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` includes `baz` which is neither a dependency nor another feature +", + ) + .run(); +} + +#[cargo_test] +fn invalid2() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + bar = ["baz"] + + [dependencies.bar] + path = "foo" + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Features and dependencies cannot have the same name: `bar` +", + ) + .run(); +} + +#[cargo_test] +fn invalid3() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + bar = ["baz"] + + [dependencies.baz] + path = "foo" + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` depends on `baz` which is not an optional dependency. 
+Consider adding `optional = true` to the dependency +", + ) + .run(); +} + +#[cargo_test] +fn invalid4() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["bar"] + "#, + ) + .file("src/main.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to select a version for `bar`. + ... required by package `foo v0.0.1 ([..])` +versions that meet the requirements `*` are: 0.0.1 + +the package `foo` depends on `bar`, with features: `bar` but `bar` does not have these features. + + +failed to select a version for `bar` which could resolve this conflict", + ) + .run(); + + p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1")); + + p.cargo("build --features test") + .with_status(101) + .with_stderr("error: Package `foo v0.0.1 ([..])` does not have these features: `test`") + .run(); +} + +#[cargo_test] +fn invalid5() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies.bar] + path = "bar" + optional = true + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Dev-dependencies are not allowed to be optional: `bar` +", + ) + .run(); +} + +#[cargo_test] +fn invalid6() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build --features foo") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `foo` requires a feature of `bar` which is not a dependency +", + ) + .run(); +} + +#[cargo_test] +fn invalid7() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + bar = [] + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build --features foo") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `foo` requires a feature of `bar` which is not a dependency +", + ) + .run(); +} + +#[cargo_test] +fn invalid8() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["foo/bar"] + "#, + ) + .file("src/main.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build --features foo") + .with_status(101) + .with_stderr("[ERROR] feature names may not contain slashes: `foo/bar`") + .run(); +} + +#[cargo_test] +fn invalid9() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build --features bar") +.with_stderr( + "\ +error: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with that name, but only optional dependencies can be used as features. 
+", + ).with_status(101).run(); +} + +#[cargo_test] +fn invalid10() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + features = ["baz"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.baz] + path = "baz" + "#, + ) + .file("bar/src/lib.rs", "") + .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("bar/baz/src/lib.rs", "") + .build(); + + p.cargo("build").with_stderr("\ +error: failed to select a version for `bar`. + ... required by package `foo v0.0.1 ([..])` +versions that meet the requirements `*` are: 0.0.1 + +the package `foo` depends on `bar`, with features: `baz` but `bar` does not have these features. + It has a required dependency with that name, but only optional dependencies can be used as features. + + +failed to select a version for `bar` which could resolve this conflict +").with_status(101) + .run(); +} + +#[cargo_test] +fn no_transitive_dep_feature_requirement() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.derived] + path = "derived" + + [features] + default = ["derived/bar/qux"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate derived; + fn main() { derived::test(); } + "#, + ) + .file( + "derived/Cargo.toml", + r#" + [package] + name = "derived" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + qux = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "qux")] + pub fn test() { print!("test"); } + "#, + ) + .build(); + p.cargo("build") + .with_status(101) + .with_stderr("[ERROR] feature names may not contain slashes: `bar/qux`") + .run(); +} + +#[cargo_test] +fn no_feature_doesnt_build() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(feature = "bar")] + extern crate bar; + #[cfg(feature = "bar")] + fn main() { bar::bar(); println!("bar") } + #[cfg(not(feature = "bar"))] + fn main() {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.process(&p.bin("foo")).with_stdout("").run(); + + p.cargo("build --features bar") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + p.process(&p.bin("foo")).with_stdout("bar\n").run(); +} + +#[cargo_test] +fn default_feature_pulled_in() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["bar"] + + [dependencies.bar] + path = "bar" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(feature = "bar")] + extern crate bar; + #[cfg(feature = "bar")] + fn main() { bar::bar(); println!("bar") } + #[cfg(not(feature = "bar"))] + fn main() {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.process(&p.bin("foo")).with_stdout("bar\n").run(); + + p.cargo("build --no-default-features") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.process(&p.bin("foo")).with_stdout("").run(); +} + +#[cargo_test] +fn cyclic_feature() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["default"] + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr("[ERROR] cyclic feature dependency: feature `default` depends on itself") + .run(); +} + +#[cargo_test] +fn cyclic_feature2() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar"] + bar = ["foo"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn groups_on_groups_on_groups() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["f1"] + f1 = ["f2", "bar"] + f2 = ["f3", "f4"] + f3 = ["f5", "f6", "baz"] + f4 = ["f5", "f7"] + f5 = ["f6"] + f6 = ["f7"] + f7 = ["bar"] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn many_cli_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("build --features") + .arg("bar baz") + .with_stderr( + "\ +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn union_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + features = ["f1"] + [dependencies.d2] + path = "d2" + features = ["f2"] + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate d1; + extern crate d2; + fn main() { + d2::f1(); + d2::f2(); + } + "#, + ) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [features] + f1 = ["d2"] + + [dependencies.d2] + path = "../d2" + features = ["f1"] + optional = true + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [features] + f1 = [] + f2 = [] + "#, + ) + .file( + "d2/src/lib.rs", + r#" + #[cfg(feature = "f1")] pub fn f1() {} + #[cfg(feature = "f2")] pub fn f2() {} + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] d2 v0.0.1 ([CWD]/d2) +[COMPILING] d1 v0.0.1 ([CWD]/d1) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn many_features_no_rebuilds() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies.a] + path = "a" + features = ["fall"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + ftest = [] + ftest2 = [] + fall = ["ftest", "ftest2"] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] a v0.1.0 ([CWD]/a) +[COMPILING] b v0.1.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.root().move_into_the_past(); + + p.cargo("build -v") + .with_stderr( + "\ +[FRESH] a v0.1.0 ([..]/a) +[FRESH] b v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +// Tests that all cmd lines work with `--features ""` +#[cargo_test] +fn empty_features() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("build --features").arg("").run(); +} + +// Tests that all cmd lines work with `--features ""` +#[cargo_test] +fn transitive_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ["bar/baz"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() { bar::baz(); }") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + baz = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#"#[cfg(feature = "baz")] pub fn baz() {}"#, + ) + .build(); + + p.cargo("build --features foo").run(); +} + +#[cargo_test] +fn everything_in_the_lockfile() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + f1 = ["d1/f1"] + f2 = ["d2"] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + optional = true + [dependencies.d3] + path = "d3" + optional = true + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [features] + f1 = [] + "#, + ) + .file("d1/src/lib.rs", "") + .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.2")) + .file("d2/src/lib.rs", "") + .file( + "d3/Cargo.toml", + r#" + [package] + name = "d3" + version = "0.0.3" + authors = [] + + [features] + f3 = [] + "#, + ) + .file("d3/src/lib.rs", "") + .build(); + + p.cargo("fetch").run(); + let loc = p.root().join("Cargo.lock"); + let mut lockfile = String::new(); + t!(t!(File::open(&loc)).read_to_string(&mut lockfile)); + assert!( + lockfile.contains(r#"name = "d1""#), + "d1 not found\n{}", + lockfile + ); + assert!( + lockfile.contains(r#"name = "d2""#), + "d2 not found\n{}", + lockfile + ); + assert!( + lockfile.contains(r#"name = "d3""#), + "d3 not found\n{}", + lockfile + ); +} + +#[cargo_test] +fn no_rebuild_when_frobbing_default_feature() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "../a", features = ["f1"], default-features = false } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + default = ["f1"] + f1 = [] + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("build").with_stdout("").run(); + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn unions_work_with_no_default_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "a" } + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "extern crate a; pub fn foo() { a::a(); }") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = "../a", features = [], default-features = false } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + authors = [] + + [features] + 
default = ["f1"] + f1 = [] + "#, + ) + .file("a/src/lib.rs", r#"#[cfg(feature = "f1")] pub fn a() {}"#) + .build(); + + p.cargo("build").run(); + p.cargo("build").with_stdout("").run(); + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn optional_and_dev_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", optional = true } + [dev-dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] test v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn activating_feature_activates_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", optional = true } + + [features] + a = ["foo/a"] + "#, + ) + .file( + "src/lib.rs", + "extern crate foo; pub fn bar() { foo::bar(); }", + ) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + a = [] + "#, + ) + .file("foo/src/lib.rs", r#"#[cfg(feature = "a")] pub fn bar() {}"#) + .build(); + + p.cargo("build --features a -v").run(); +} + +#[cargo_test] +fn dep_feature_in_cmd_line() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.derived] + path = "derived" + "#, + ) + .file( + "src/main.rs", + r#" + extern crate derived; + fn main() { derived::test(); } + "#, + ) + .file( + "derived/Cargo.toml", + r#" + [package] + name = "derived" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + + [features] + default = [] + derived-feat = ["bar/some-feat"] + "#, + ) + .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + some-feat = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "some-feat")] + pub fn test() { print!("test"); } + "#, + ) + .build(); + + // The foo project requires that feature "some-feat" in "bar" is enabled. + // Building without any features enabled should fail: + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]unresolved import `bar::test`") + .run(); + + // We should be able to enable the feature "derived-feat", which enables "some-feat", + // on the command line. 
The feature is enabled, thus building should be successful: + p.cargo("build --features derived/derived-feat").run(); + + // Trying to enable features of transitive dependencies is an error + p.cargo("build --features bar/some-feat") + .with_status(101) + .with_stderr("error: Package `foo v0.0.1 ([..])` does not have these features: `bar`") + .run(); + + // Hierarchical feature specification should still be disallowed + p.cargo("build --features derived/bar/some-feat") + .with_status(101) + .with_stderr("[ERROR] feature names may not contain slashes: `bar/some-feat`") + .run(); +} + +#[cargo_test] +fn all_features_flag_enables_all_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = [] + bar = [] + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(feature = "foo")] + pub fn foo() {} + + #[cfg(feature = "bar")] + pub fn bar() { + extern crate baz; + baz::baz(); + } + + fn main() { + foo(); + bar(); + } + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("build --all-features").run(); +} + +#[cargo_test] +fn many_cli_features_comma_delimited() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + fn main() {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("build --features bar,baz") + .with_stderr( + "\ +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn many_cli_features_comma_and_space_delimited() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [dependencies.baz] + path = "baz" + optional = true + + [dependencies.bam] + path = "bam" + optional = true + + [dependencies.bap] + path = "bap" + optional = true + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate bar; + #[allow(unused_extern_crates)] + extern crate baz; + #[allow(unused_extern_crates)] + extern crate bam; + #[allow(unused_extern_crates)] + extern crate bap; + fn main() {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .file("bam/Cargo.toml", &basic_manifest("bam", "0.0.1")) + .file("bam/src/lib.rs", "pub fn bam() {}") + .file("bap/Cargo.toml", &basic_manifest("bap", "0.0.1")) + .file("bap/src/lib.rs", "pub fn bap() {}") + .build(); + + p.cargo("build --features") + .arg("bar,baz bam bap") + .with_stderr( + "\ +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) +[COMPILING] ba[..] 
v0.0.1 ([CWD]/ba[..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn combining_features_and_package() { + Package::new("dep", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [workspace] + members = ["bar"] + + [dependencies] + dep = "1" + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + [features] + main = [] + "#, + ) + .file( + "bar/src/main.rs", + r#" + #[cfg(feature = "main")] + fn main() {} + "#, + ) + .build(); + + p.cargo("build -Z package-features --all --features main") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains("[ERROR] cannot specify features for more than one package") + .run(); + + p.cargo("build -Z package-features --package dep --features main") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains("[ERROR] cannot specify features for packages outside of workspace") + .run(); + p.cargo("build -Z package-features --package dep --all-features") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains("[ERROR] cannot specify features for packages outside of workspace") + .run(); + p.cargo("build -Z package-features --package dep --no-default-features") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains("[ERROR] cannot specify features for packages outside of workspace") + .run(); + + p.cargo("build -Z package-features --all --all-features") + .masquerade_as_nightly_cargo() + .run(); + p.cargo("run -Z package-features --package bar --features main") + .masquerade_as_nightly_cargo() + .run(); +} + +#[cargo_test] +fn namespaced_invalid_feature() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + bar = ["baz"] + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` includes `baz` which is not defined as a feature +", + ) + .run(); +} + +#[cargo_test] +fn namespaced_invalid_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + bar = ["crate:baz"] + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` includes `crate:baz` which is not a known dependency +", + ) + .run(); +} + +#[cargo_test] +fn namespaced_non_optional_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + bar = ["crate:baz"] + + [dependencies] + baz = "0.1" + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` includes `crate:baz` which is not an optional dependency. 
+Consider adding `optional = true` to the dependency +", + ) + .run(); +} + +#[cargo_test] +fn namespaced_implicit_feature() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + bar = ["baz"] + + [dependencies] + baz = { version = "0.1", optional = true } + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").masquerade_as_nightly_cargo().run(); +} + +#[cargo_test] +fn namespaced_shadowed_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + baz = [] + + [dependencies] + baz = { version = "0.1", optional = true } + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").masquerade_as_nightly_cargo().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `baz` includes the optional dependency of the same name, but this is left implicit in the features included by this feature. +Consider adding `crate:baz` to this feature's requirements. +", + ) + .run(); +} + +#[cargo_test] +fn namespaced_shadowed_non_optional() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + baz = [] + + [dependencies] + baz = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").masquerade_as_nightly_cargo().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `baz` includes the dependency of the same name, but this is left implicit in the features included by this feature. +Additionally, the dependency must be marked as optional to be included in the feature definition. +Consider adding `crate:baz` to this feature's requirements and marking the dependency as `optional = true` +", + ) + .run(); +} + +#[cargo_test] +fn namespaced_implicit_non_optional() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + bar = ["baz"] + + [dependencies] + baz = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").masquerade_as_nightly_cargo().with_status(101).with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + Feature `bar` includes `baz` which is not defined as a feature. 
+A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition +", + ).run( + ); +} + +#[cargo_test] +fn namespaced_same_name() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["namespaced-features"] + + [project] + name = "foo" + version = "0.0.1" + authors = [] + namespaced-features = true + + [features] + baz = ["crate:baz"] + + [dependencies] + baz = { version = "0.1", optional = true } + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").masquerade_as_nightly_cargo().run(); +} + +#[cargo_test] +fn only_dep_is_optional() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + foo = ['bar'] + + [dependencies] + bar = { version = "0.1", optional = true } + + [dev-dependencies] + bar = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn all_features_all_crates() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [workspace] + members = ['bar'] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + foo = [] + "#, + ) + .file("bar/src/main.rs", "#[cfg(feature = \"foo\")] fn main() {}") + .build(); + + p.cargo("build --all-features --all").run(); +} + +#[cargo_test] +fn feature_off_dylib() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + + [package] + name = "foo" + version = "0.0.1" + + [lib] + crate-type = ["dylib"] + + [features] + f1 = [] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn hello() -> &'static str { + if cfg!(feature = "f1") { + "f1" + } else { + "no f1" + } + } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [dependencies] + foo = { path = ".." } + "#, + ) + .file( + "bar/src/main.rs", + r#" + extern crate foo; + + fn main() { + assert_eq!(foo::hello(), "no f1"); + } + "#, + ) + .build(); + + // Build the dylib with `f1` feature. + p.cargo("build --features f1").run(); + // Check that building without `f1` uses a dylib without `f1`. + p.cargo("run -p bar").run(); +} + +#[cargo_test] +fn warn_if_default_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + optional = true + + [features] + default-features = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build") + .with_stderr( + r#" +[WARNING] `default-features = [".."]` was found in [features]. Did you mean to use `default = [".."]`? +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+ "#.trim(), + ).run(); +} + +#[cargo_test] +fn no_feature_for_non_optional_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(not(feature = "bar"))] + fn main() { + } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + a = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build --features bar/a").run(); +} + +#[cargo_test] +fn features_option_given_twice() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + b = [] + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(all(feature = "a", feature = "b"))] + fn main() {} + "#, + ) + .build(); + + p.cargo("build --features a --features b").run(); +} + +#[cargo_test] +fn multi_multi_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + b = [] + c = [] + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(all(feature = "a", feature = "b", feature = "c"))] + fn main() {} + "#, + ) + .build(); + + p.cargo("build --features a --features").arg("b c").run(); +} diff --git a/tests/testsuite/fetch.rs b/tests/testsuite/fetch.rs new file mode 100644 index 00000000000..de8c5c0f0fb --- /dev/null +++ b/tests/testsuite/fetch.rs @@ -0,0 +1,114 @@ +use crate::support::registry::Package; +use crate::support::rustc_host; +use crate::support::{basic_manifest, cross_compile, project}; + +#[cargo_test] +fn no_deps() { + let p = project() + .file("src/main.rs", "mod a; fn main() {}") + .file("src/a.rs", "") + .build(); + + p.cargo("fetch").with_stdout("").run(); +} + +#[cargo_test] +fn fetch_all_platform_dependencies_when_no_target_is_given() { + if cross_compile::disabled() { + return; + } + + Package::new("d1", "1.2.3") + .file("Cargo.toml", &basic_manifest("d1", "1.2.3")) + .file("src/lib.rs", "") + .publish(); + + Package::new("d2", "0.1.2") + .file("Cargo.toml", &basic_manifest("d2", "0.1.2")) + .file("src/lib.rs", "") + .publish(); + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.{host}.dependencies] + d1 = "1.2.3" + + [target.{target}.dependencies] + d2 = "0.1.2" + "#, + host = host, + target = target + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("fetch") + .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]") + .with_stderr_contains("[DOWNLOADED] d2 v0.1.2 [..]") + .run(); +} + +#[cargo_test] +fn fetch_platform_specific_dependencies() { + if cross_compile::disabled() { + return; + } + + Package::new("d1", "1.2.3") + .file("Cargo.toml", &basic_manifest("d1", "1.2.3")) + .file("src/lib.rs", "") + .publish(); + + Package::new("d2", "0.1.2") + .file("Cargo.toml", &basic_manifest("d2", "0.1.2")) + .file("src/lib.rs", "") + .publish(); + + let target = cross_compile::alternate(); + let host = rustc_host(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.{host}.dependencies] + d1 = "1.2.3" + + [target.{target}.dependencies] + d2 = "0.1.2" + "#, + host = host, + target = target + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("fetch --target") + .arg(&host) + 
.with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]") + .with_stderr_does_not_contain("[DOWNLOADED] d2 v0.1.2 [..]") + .run(); + + p.cargo("fetch --target") + .arg(&target) + .with_stderr_contains("[DOWNLOADED] d2 v0.1.2[..]") + .with_stderr_does_not_contain("[DOWNLOADED] d1 v1.2.3 [..]") + .run(); +} diff --git a/tests/testsuite/fix.rs b/tests/testsuite/fix.rs new file mode 100644 index 00000000000..225362ac10f --- /dev/null +++ b/tests/testsuite/fix.rs @@ -0,0 +1,1330 @@ +use std::fs::File; + +use git2; + +use crate::support::git; +use crate::support::{basic_manifest, clippy_is_available, is_nightly, project}; + +use std::io::Write; + +#[cargo_test] +fn do_not_fix_broken_builds() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn foo() { + let mut x = 3; + drop(x); + } + + pub fn foo2() { + let _x: u32 = "a"; + } + "#, + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .with_status(101) + .with_stderr_contains("[ERROR] Could not compile `foo`.") + .run(); + assert!(p.read_file("src/lib.rs").contains("let mut x = 3;")); +} + +#[cargo_test] +fn fix_broken_if_requested() { + let p = project() + .file( + "src/lib.rs", + r#" + fn foo(a: &u32) -> u32 { a + 1 } + pub fn bar() { + foo(1); + } + "#, + ) + .build(); + + p.cargo("fix --allow-no-vcs --broken-code") + .env("__CARGO_FIX_YOLO", "1") + .run(); +} + +#[cargo_test] +fn broken_fixes_backed_out() { + // This works as follows: + // - Create a `rustc` shim (the "foo" project) which will pretend that the + // verification step fails. + // - There is an empty build script so `foo` has `OUT_DIR` to track the steps. + // - The first "check", `foo` creates a file in OUT_DIR, and it completes + // successfully with a warning diagnostic to remove unused `mut`. + // - rustfix removes the `mut`. + // - The second "check" to verify the changes, `foo` swaps out the content + // with something that fails to compile. It creates a second file so it + // won't do anything in the third check. + // - cargo fix discovers that the fix failed, and it backs out the changes. + // - The third "check" is done to display the original diagnostics of the + // original code. + let p = project() + .file( + "foo/Cargo.toml", + r#" + [package] + name = 'foo' + version = '0.1.0' + [workspace] + "#, + ) + .file( + "foo/src/main.rs", + r##" + use std::env; + use std::fs; + use std::io::Write; + use std::path::{Path, PathBuf}; + use std::process::{self, Command}; + + fn main() { + // Ignore calls to things like --print=file-names and compiling build.rs. 
+ let is_lib_rs = env::args_os() + .map(PathBuf::from) + .any(|l| l == Path::new("src/lib.rs")); + if is_lib_rs { + let path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + let first = path.join("first"); + let second = path.join("second"); + if first.exists() && !second.exists() { + fs::write("src/lib.rs", b"not rust code").unwrap(); + fs::File::create(&second).unwrap(); + } else { + fs::File::create(&first).unwrap(); + } + } + + let status = Command::new("rustc") + .args(env::args().skip(1)) + .status() + .expect("failed to run rustc"); + process::exit(status.code().unwrap_or(2)); + } + "##, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = 'bar' + version = '0.1.0' + [workspace] + "#, + ) + .file("bar/build.rs", "fn main() {}") + .file( + "bar/src/lib.rs", + r#" + pub fn foo() { + let mut x = 3; + drop(x); + } + "#, + ) + .build(); + + // Build our rustc shim + p.cargo("build").cwd("foo").run(); + + // Attempt to fix code, but our shim will always fail the second compile + p.cargo("fix --allow-no-vcs --lib") + .cwd("bar") + .env("__CARGO_FIX_YOLO", "1") + .env("RUSTC", p.root().join("foo/target/debug/foo")) + .with_stderr_contains( + "warning: failed to automatically apply fixes suggested by rustc \ + to crate `bar`\n\ + \n\ + after fixes were automatically applied the compiler reported \ + errors within these files:\n\ + \n \ + * src/lib.rs\n\ + \n\ + This likely indicates a bug in either rustc or cargo itself,\n\ + and we would appreciate a bug report! You're likely to see \n\ + a number of compiler warnings after this message which cargo\n\ + attempted to fix but failed. If you could open an issue at\n\ + [..]\n\ + quoting the full output of this command we'd be very appreciative!\n\ + Note that you may be able to make some more progress in the near-term\n\ + fixing code with the `--broken-code` flag\n\ + \n\ + The following errors were reported:\n\ + error: expected one of `!` or `::`, found `rust`\n\ + ", + ) + .with_stderr_contains("Original diagnostics will follow.") + .with_stderr_contains("[WARNING] variable does not need to be mutable") + .with_stderr_does_not_contain("[..][FIXING][..]") + .run(); + + // Make sure the fix which should have been applied was backed out + assert!(p.read_file("bar/src/lib.rs").contains("let mut x = 3;")); +} + +#[cargo_test] +fn fix_path_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = 'bar' } + + [workspace] + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + + pub fn foo() -> u32 { + let mut x = 3; + x + } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file( + "bar/src/lib.rs", + r#" + pub fn foo() -> u32 { + let mut x = 3; + x + } + "#, + ) + .build(); + + p.cargo("fix --allow-no-vcs -p foo -p bar") + .env("__CARGO_FIX_YOLO", "1") + .with_stdout("") + .with_stderr_unordered( + "\ +[CHECKING] bar v0.1.0 ([..]) +[FIXING] bar/src/lib.rs (1 fix) +[CHECKING] foo v0.1.0 ([..]) +[FIXING] src/lib.rs (1 fix) +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn do_not_fix_non_relevant_deps() { + let p = project() + .no_manifest() + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = '../bar' } + + [workspace] + "#, + ) + .file("foo/src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file( + "bar/src/lib.rs", + r#" + pub fn foo() -> u32 { + let mut x = 3; + x + } + "#, + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .cwd("foo") + .run(); + + assert!(p.read_file("bar/src/lib.rs").contains("mut")); +} + +#[cargo_test] +fn prepare_for_2018() { + let p = project() + .file( + "src/lib.rs", + r#" + #![allow(unused)] + + mod foo { + pub const FOO: &str = "fooo"; + } + + mod bar { + use ::foo::FOO; + } + + fn main() { + let x = ::foo::FOO; + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FIXING] src/lib.rs (2 fixes) +[FINISHED] [..] +"; + p.cargo("fix --edition --allow-no-vcs") + .with_stderr(stderr) + .with_stdout("") + .run(); + + println!("{}", p.read_file("src/lib.rs")); + assert!(p.read_file("src/lib.rs").contains("use crate::foo::FOO;")); + assert!(p + .read_file("src/lib.rs") + .contains("let x = crate::foo::FOO;")); +} + +#[cargo_test] +fn local_paths() { + let p = project() + .file( + "src/lib.rs", + r#" + use test::foo; + + mod test { + pub fn foo() {} + } + + pub fn f() { + foo(); + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FIXING] src/lib.rs (1 fix) +[FINISHED] [..] +"; + + p.cargo("fix --edition --allow-no-vcs") + .with_stderr(stderr) + .with_stdout("") + .run(); + + println!("{}", p.read_file("src/lib.rs")); + assert!(p.read_file("src/lib.rs").contains("use crate::test::foo;")); +} + +#[cargo_test] +fn upgrade_extern_crate() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + edition = '2018' + + [workspace] + + [dependencies] + bar = { path = 'bar' } + "#, + ) + .file( + "src/lib.rs", + r#" + #![warn(rust_2018_idioms)] + extern crate bar; + + use bar::bar; + + pub fn foo() { + ::bar::bar(); + bar(); + } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + let stderr = "\ +[CHECKING] bar v0.1.0 ([..]) +[CHECKING] foo v0.1.0 ([..]) +[FIXING] src/lib.rs (1 fix) +[FINISHED] [..] +"; + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .with_stderr(stderr) + .with_stdout("") + .run(); + println!("{}", p.read_file("src/lib.rs")); + assert!(!p.read_file("src/lib.rs").contains("extern crate")); +} + +#[cargo_test] +fn specify_rustflags() { + let p = project() + .file( + "src/lib.rs", + r#" + #![allow(unused)] + + mod foo { + pub const FOO: &str = "fooo"; + } + + fn main() { + let x = ::foo::FOO; + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FIXING] src/lib.rs (1 fix) +[FINISHED] [..] +"; + p.cargo("fix --edition --allow-no-vcs") + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr(stderr) + .with_stdout("") + .run(); +} + +#[cargo_test] +fn no_changes_necessary() { + let p = project().file("src/lib.rs", "").build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FINISHED] [..] 
+"; + p.cargo("fix --allow-no-vcs") + .with_stderr(stderr) + .with_stdout("") + .run(); +} + +#[cargo_test] +fn fixes_extra_mut() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn foo() -> u32 { + let mut x = 3; + x + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FIXING] src/lib.rs (1 fix) +[FINISHED] [..] +"; + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .with_stderr(stderr) + .with_stdout("") + .run(); +} + +#[cargo_test] +fn fixes_two_missing_ampersands() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn foo() -> u32 { + let mut x = 3; + let mut y = 3; + x + y + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FIXING] src/lib.rs (2 fixes) +[FINISHED] [..] +"; + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .with_stderr(stderr) + .with_stdout("") + .run(); +} + +#[cargo_test] +fn tricky() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn foo() -> u32 { + let mut x = 3; let mut y = 3; + x + y + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FIXING] src/lib.rs (2 fixes) +[FINISHED] [..] +"; + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .with_stderr(stderr) + .with_stdout("") + .run(); +} + +#[cargo_test] +fn preserve_line_endings() { + let p = project() + .file( + "src/lib.rs", + "fn add(a: &u32) -> u32 { a + 1 }\r\n\ + pub fn foo() -> u32 { let mut x = 3; add(&x) }\r\n\ + ", + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .run(); + assert!(p.read_file("src/lib.rs").contains("\r\n")); +} + +#[cargo_test] +fn fix_deny_warnings() { + let p = project() + .file( + "src/lib.rs", + "#![deny(warnings)] + pub fn foo() { let mut x = 3; drop(x); } + ", + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .run(); +} + +#[cargo_test] +fn fix_deny_warnings_but_not_others() { + let p = project() + .file( + "src/lib.rs", + " + #![deny(warnings)] + + pub fn foo() -> u32 { + let mut x = 3; + x + } + + fn bar() {} + ", + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .run(); + assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;")); + assert!(p.read_file("src/lib.rs").contains("fn bar() {}")); +} + +#[cargo_test] +fn fix_two_files() { + let p = project() + .file( + "src/lib.rs", + " + pub mod bar; + + pub fn foo() -> u32 { + let mut x = 3; + x + } + ", + ) + .file( + "src/bar.rs", + " + pub fn foo() -> u32 { + let mut x = 3; + x + } + + ", + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .with_stderr_contains("[FIXING] src/bar.rs (1 fix)") + .with_stderr_contains("[FIXING] src/lib.rs (1 fix)") + .run(); + assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;")); + assert!(!p.read_file("src/bar.rs").contains("let mut x = 3;")); +} + +#[cargo_test] +fn fixes_missing_ampersand() { + let p = project() + .file("src/main.rs", "fn main() { let mut x = 3; drop(x); }") + .file( + "src/lib.rs", + r#" + pub fn foo() { let mut x = 3; drop(x); } + + #[test] + pub fn foo2() { let mut x = 3; drop(x); } + "#, + ) + .file( + "tests/a.rs", + r#" + #[test] + pub fn foo() { let mut x = 3; drop(x); } + "#, + ) + .file("examples/foo.rs", "fn main() { let mut x = 3; drop(x); }") + .file("build.rs", "fn main() { let mut x = 3; drop(x); }") + .build(); + + p.cargo("fix --all-targets --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .with_stdout("") + .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") + 
.with_stderr_contains("[FIXING] build.rs (1 fix)") + // Don't assert number of fixes for this one, as we don't know if we're + // fixing it once or twice! We run this all concurrently, and if we + // compile (and fix) in `--test` mode first, we get two fixes. Otherwise + // we'll fix one non-test thing, and then fix another one later in + // test mode. + .with_stderr_contains("[FIXING] src/lib.rs[..]") + .with_stderr_contains("[FIXING] src/main.rs (1 fix)") + .with_stderr_contains("[FIXING] examples/foo.rs (1 fix)") + .with_stderr_contains("[FIXING] tests/a.rs (1 fix)") + .with_stderr_contains("[FINISHED] [..]") + .run(); + p.cargo("build").run(); + p.cargo("test").run(); +} + +#[cargo_test] +fn fix_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [features] + bar = [] + + [workspace] + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(feature = "bar")] + pub fn foo() -> u32 { let mut x = 3; x } + "#, + ) + .build(); + + p.cargo("fix --allow-no-vcs").run(); + p.cargo("build").run(); + p.cargo("fix --features bar --allow-no-vcs").run(); + p.cargo("build --features bar").run(); +} + +#[cargo_test] +fn shows_warnings() { + let p = project() + .file( + "src/lib.rs", + "#[deprecated] fn bar() {} pub fn foo() { let _ = bar(); }", + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .with_stderr_contains("[..]warning: use of deprecated item[..]") + .run(); +} + +#[cargo_test] +fn warns_if_no_vcs_detected() { + let p = project().file("src/lib.rs", "pub fn foo() {}").build(); + + p.cargo("fix") + .with_status(101) + .with_stderr( + "error: no VCS found for this package and `cargo fix` can potentially perform \ + destructive changes; if you'd like to suppress this error pass `--allow-no-vcs`\ + ", + ) + .run(); + p.cargo("fix --allow-no-vcs").run(); +} + +#[cargo_test] +fn warns_about_dirty_working_directory() { + let p = project().file("src/lib.rs", "pub fn foo() {}").build(); + + let repo = git2::Repository::init(&p.root()).unwrap(); + let mut cfg = t!(repo.config()); + t!(cfg.set_str("user.email", "foo@bar.com")); + t!(cfg.set_str("user.name", "Foo Bar")); + drop(cfg); + git::add(&repo); + git::commit(&repo); + File::create(p.root().join("src/lib.rs")).unwrap(); + + p.cargo("fix") + .with_status(101) + .with_stderr( + "\ +error: the working directory of this package has uncommitted changes, \ +and `cargo fix` can potentially perform destructive changes; if you'd \ +like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \ +commit the changes to these files: + + * src/lib.rs (dirty) + + +", + ) + .run(); + p.cargo("fix --allow-dirty").run(); +} + +#[cargo_test] +fn warns_about_staged_working_directory() { + let p = project().file("src/lib.rs", "pub fn foo() {}").build(); + + let repo = git2::Repository::init(&p.root()).unwrap(); + let mut cfg = t!(repo.config()); + t!(cfg.set_str("user.email", "foo@bar.com")); + t!(cfg.set_str("user.name", "Foo Bar")); + drop(cfg); + git::add(&repo); + git::commit(&repo); + File::create(&p.root().join("src/lib.rs")) + .unwrap() + .write_all("pub fn bar() {}".to_string().as_bytes()) + .unwrap(); + git::add(&repo); + + p.cargo("fix") + .with_status(101) + .with_stderr( + "\ +error: the working directory of this package has uncommitted changes, \ +and `cargo fix` can potentially perform destructive changes; if you'd \ +like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \ +commit the changes to these files: + + * src/lib.rs (staged) + + +", + ) + .run(); + p.cargo("fix 
--allow-staged").run(); +} + +#[cargo_test] +fn does_not_warn_about_clean_working_directory() { + let p = project().file("src/lib.rs", "pub fn foo() {}").build(); + + let repo = git2::Repository::init(&p.root()).unwrap(); + let mut cfg = t!(repo.config()); + t!(cfg.set_str("user.email", "foo@bar.com")); + t!(cfg.set_str("user.name", "Foo Bar")); + drop(cfg); + git::add(&repo); + git::commit(&repo); + + p.cargo("fix").run(); +} + +#[cargo_test] +fn does_not_warn_about_dirty_ignored_files() { + let p = project() + .file("src/lib.rs", "pub fn foo() {}") + .file(".gitignore", "bar\n") + .build(); + + let repo = git2::Repository::init(&p.root()).unwrap(); + let mut cfg = t!(repo.config()); + t!(cfg.set_str("user.email", "foo@bar.com")); + t!(cfg.set_str("user.name", "Foo Bar")); + drop(cfg); + git::add(&repo); + git::commit(&repo); + File::create(p.root().join("bar")).unwrap(); + + p.cargo("fix").run(); +} + +#[cargo_test] +fn fix_all_targets_by_default() { + let p = project() + .file("src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }") + .file("tests/foo.rs", "pub fn foo() { let mut x = 3; drop(x); }") + .build(); + p.cargo("fix --allow-no-vcs") + .env("__CARGO_FIX_YOLO", "1") + .run(); + assert!(!p.read_file("src/lib.rs").contains("let mut x")); + assert!(!p.read_file("tests/foo.rs").contains("let mut x")); +} + +#[cargo_test] +fn prepare_for_and_enable() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = 'foo' + version = '0.1.0' + edition = '2018' + "#, + ) + .file("src/lib.rs", "") + .build(); + + let stderr = "\ +error: cannot prepare for the 2018 edition when it is enabled, so cargo cannot +automatically fix errors in `src/lib.rs` + +To prepare for the 2018 edition you should first remove `edition = '2018'` from +your `Cargo.toml` and then rerun this command. Once all warnings have been fixed +then you can re-enable the `edition` key in `Cargo.toml`. For some more +information about transitioning to the 2018 edition see: + + https://[..] + +"; + p.cargo("fix --edition --allow-no-vcs") + .with_stderr_contains(stderr) + .with_status(101) + .run(); +} + +#[cargo_test] +fn fix_overlapping() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn foo() {} + pub struct A; + + pub mod bar { + pub fn baz() { + ::foo::<::A>(); + } + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo [..] +[FIXING] src/lib.rs (2 fixes) +[FINISHED] dev [..] +"; + + p.cargo("fix --allow-no-vcs --prepare-for 2018 --lib") + .with_stderr(stderr) + .run(); + + let contents = p.read_file("src/lib.rs"); + println!("{}", contents); + assert!(contents.contains("crate::foo::()")); +} + +#[cargo_test] +fn fix_idioms() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = 'foo' + version = '0.1.0' + edition = '2018' + "#, + ) + .file( + "src/lib.rs", + r#" + use std::any::Any; + pub fn foo() { + let _x: Box = Box::new(3); + } + "#, + ) + .build(); + + let stderr = "\ +[CHECKING] foo [..] +[FIXING] src/lib.rs (1 fix) +[FINISHED] [..] +"; + p.cargo("fix --edition-idioms --allow-no-vcs") + .with_stderr(stderr) + .run(); + + assert!(p.read_file("src/lib.rs").contains("Box")); +} + +#[cargo_test] +fn idioms_2015_ok() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("fix --edition-idioms --allow-no-vcs").run(); +} + +#[cargo_test] +fn both_edition_migrate_flags() { + let p = project().file("src/lib.rs", "").build(); + + let stderr = "\ +error: The argument '--edition' cannot be used with '--prepare-for ' + +USAGE: + cargo[..] 
fix --edition --message-format + +For more information try --help +"; + + p.cargo("fix --prepare-for 2018 --edition") + .with_status(1) + .with_stderr(stderr) + .run(); +} + +#[cargo_test] +fn shows_warnings_on_second_run_without_changes() { + let p = project() + .file( + "src/lib.rs", + r#" + #[deprecated] + fn bar() {} + + pub fn foo() { + let _ = bar(); + } + "#, + ) + .build(); + + p.cargo("fix --allow-no-vcs") + .with_stderr_contains("[..]warning: use of deprecated item[..]") + .run(); + + p.cargo("fix --allow-no-vcs") + .with_stderr_contains("[..]warning: use of deprecated item[..]") + .run(); +} + +#[cargo_test] +fn shows_warnings_on_second_run_without_changes_on_multiple_targets() { + let p = project() + .file( + "src/lib.rs", + r#" + #[deprecated] + fn bar() {} + + pub fn foo() { + let _ = bar(); + } + "#, + ) + .file( + "src/main.rs", + r#" + #[deprecated] + fn bar() {} + + fn main() { + let _ = bar(); + } + "#, + ) + .file( + "tests/foo.rs", + r#" + #[deprecated] + fn bar() {} + + #[test] + fn foo_test() { + let _ = bar(); + } + "#, + ) + .file( + "tests/bar.rs", + r#" + #[deprecated] + fn bar() {} + + #[test] + fn foo_test() { + let _ = bar(); + } + "#, + ) + .file( + "examples/fooxample.rs", + r#" + #[deprecated] + fn bar() {} + + fn main() { + let _ = bar(); + } + "#, + ) + .build(); + + p.cargo("fix --allow-no-vcs --all-targets") + .with_stderr_contains(" --> examples/fooxample.rs:6:29") + .with_stderr_contains(" --> src/lib.rs:6:29") + .with_stderr_contains(" --> src/main.rs:6:29") + .with_stderr_contains(" --> tests/bar.rs:7:29") + .with_stderr_contains(" --> tests/foo.rs:7:29") + .run(); + + p.cargo("fix --allow-no-vcs --all-targets") + .with_stderr_contains(" --> examples/fooxample.rs:6:29") + .with_stderr_contains(" --> src/lib.rs:6:29") + .with_stderr_contains(" --> src/main.rs:6:29") + .with_stderr_contains(" --> tests/bar.rs:7:29") + .with_stderr_contains(" --> tests/foo.rs:7:29") + .run(); +} + +#[cargo_test] +fn doesnt_rebuild_dependencies() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = 'bar' } + + [workspace] + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("fix --allow-no-vcs -p foo") + .env("__CARGO_FIX_YOLO", "1") + .with_stdout("") + .with_stderr( + "\ +[CHECKING] bar v0.1.0 ([..]) +[CHECKING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("fix --allow-no-vcs -p foo") + .env("__CARGO_FIX_YOLO", "1") + .with_stdout("") + .with_stderr( + "\ +[CHECKING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn does_not_crash_with_rustc_wrapper() { + // We don't have /usr/bin/env on Windows. 
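+    // That is why the test returns early there; on Unix, `/usr/bin/env` works
+    // as a harmless RUSTC_WRAPPER because it simply executes its arguments.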
+    if cfg!(windows) {
+        return;
+    }
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("fix --allow-no-vcs")
+        .env("RUSTC_WRAPPER", "/usr/bin/env")
+        .run();
+}
+
+#[cargo_test]
+fn only_warn_for_relevant_crates() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [dependencies]
+                a = { path = 'a' }
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "a/Cargo.toml",
+            r#"
+                [package]
+                name = "a"
+                version = "0.1.0"
+            "#,
+        )
+        .file(
+            "a/src/lib.rs",
+            "
+            pub fn foo() {}
+            pub mod bar {
+                use foo;
+                pub fn baz() { foo() }
+            }
+            ",
+        )
+        .build();
+
+    p.cargo("fix --allow-no-vcs --edition")
+        .with_stderr(
+            "\
+[CHECKING] a v0.1.0 ([..])
+[CHECKING] foo v0.1.0 ([..])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn fix_to_broken_code() {
+    let p = project()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+                [package]
+                name = 'foo'
+                version = '0.1.0'
+                [workspace]
+            "#,
+        )
+        .file(
+            "foo/src/main.rs",
+            r##"
+                use std::env;
+                use std::fs;
+                use std::io::Write;
+                use std::path::{Path, PathBuf};
+                use std::process::{self, Command};
+
+                fn main() {
+                    let is_lib_rs = env::args_os()
+                        .map(PathBuf::from)
+                        .any(|l| l == Path::new("src/lib.rs"));
+                    if is_lib_rs {
+                        let path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+                        let path = path.join("foo");
+                        if path.exists() {
+                            panic!()
+                        } else {
+                            fs::File::create(&path).unwrap();
+                        }
+                    }
+
+                    let status = Command::new("rustc")
+                        .args(env::args().skip(1))
+                        .status()
+                        .expect("failed to run rustc");
+                    process::exit(status.code().unwrap_or(2));
+                }
+            "##,
+        )
+        .file(
+            "bar/Cargo.toml",
+            r#"
+                [package]
+                name = 'bar'
+                version = '0.1.0'
+                [workspace]
+            "#,
+        )
+        .file("bar/build.rs", "fn main() {}")
+        .file("bar/src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }")
+        .build();
+
+    // Build our rustc shim
+    p.cargo("build").cwd("foo").run();
+
+    // Attempt to fix code, but our shim will always fail the second compile
+    p.cargo("fix --allow-no-vcs --broken-code")
+        .cwd("bar")
+        .env("RUSTC", p.root().join("foo/target/debug/foo"))
+        .with_status(101)
+        .with_stderr_contains("[WARNING] failed to automatically apply fixes [..]")
+        .run();
+
+    assert_eq!(
+        p.read_file("bar/src/lib.rs"),
+        "pub fn foo() { let x = 3; drop(x); }"
+    );
+}
+
+#[cargo_test]
+fn fix_with_common() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file(
+            "tests/t1.rs",
+            "mod common; #[test] fn t1() { common::try(); }",
+        )
+        .file(
+            "tests/t2.rs",
+            "mod common; #[test] fn t2() { common::try(); }",
+        )
+        .file("tests/common/mod.rs", "pub fn try() {}")
+        .build();
+
+    p.cargo("fix --edition --allow-no-vcs").run();
+
+    assert_eq!(p.read_file("tests/common/mod.rs"), "pub fn r#try() {}");
+}
+
+#[cargo_test]
+fn fix_in_existing_repo_weird_ignore() {
+    // Check that ignore doesn't ignore the repo itself.
+    let p = git::new("foo", |project| {
+        project
+            .file("src/lib.rs", "")
+            .file(".gitignore", "foo\ninner\n")
+            .file("inner/file", "")
+    })
+    .unwrap();
+
+    p.cargo("fix").run();
+    // It is questionable whether this is the right behavior. It should
+    // probably check whether any source file for the current project is
+    // ignored.
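+    // As it stands, `inner` is listed in the project's `.gitignore`, so
+    // running from that directory is reported as having no VCS at all.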
+ p.cargo("fix") + .cwd("inner") + .with_stderr_contains("[ERROR] no VCS found[..]") + .with_status(101) + .run(); + p.cargo("fix").cwd("src").run(); +} + +#[cargo_test] +fn fix_with_clippy() { + if !is_nightly() { + // fix --clippy is unstable + eprintln!("skipping test: requires nightly"); + return; + } + + if !clippy_is_available() { + return; + } + + let p = project() + .file( + "src/lib.rs", + " + pub fn foo() { + let mut v = Vec::::new(); + let _ = v.iter_mut().filter(|&ref a| a.is_empty()); + } + ", + ) + .build(); + + let stderr = "\ +[CHECKING] foo v0.0.1 ([..]) +[FIXING] src/lib.rs (1 fix) +[FINISHED] [..] +"; + + p.cargo("fix -Zunstable-options --clippy --allow-no-vcs") + .masquerade_as_nightly_cargo() + .with_stderr(stderr) + .with_stdout("") + .run(); + + assert_eq!( + p.read_file("src/lib.rs"), + " + pub fn foo() { + let mut v = Vec::::new(); + let _ = v.iter_mut().filter(|a| a.is_empty()); + } + " + ); +} diff --git a/tests/testsuite/freshness.rs b/tests/testsuite/freshness.rs new file mode 100644 index 00000000000..e103da47b77 --- /dev/null +++ b/tests/testsuite/freshness.rs @@ -0,0 +1,2051 @@ +use filetime::FileTime; +use std::fs::{self, File, OpenOptions}; +use std::io; +use std::io::prelude::*; +use std::net::TcpListener; +use std::path::{Path, PathBuf}; +use std::thread; +use std::time::SystemTime; + +use crate::support::paths::{self, CargoPathExt}; +use crate::support::registry::Package; +use crate::support::sleep_ms; +use crate::support::{basic_manifest, is_coarse_mtime, project}; + +#[cargo_test] +fn modifying_and_moving() { + let p = project() + .file("src/main.rs", "mod a; fn main() {}") + .file("src/a.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build").with_stdout("").run(); + p.root().move_into_the_past(); + p.root().join("target").move_into_the_past(); + + File::create(&p.root().join("src/a.rs")) + .unwrap() + .write_all(b"#[allow(unused)]fn main() {}") + .unwrap(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]file not found[..]") + .run(); +} + +#[cargo_test] +fn modify_only_some_files() { + let p = project() + .file("src/lib.rs", "mod a;") + .file("src/a.rs", "") + .file("src/main.rs", "mod b; fn main() {}") + .file("src/b.rs", "") + .file("tests/test.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.cargo("test").run(); + sleep_ms(1000); + + assert!(p.bin("foo").is_file()); + + let lib = p.root().join("src/lib.rs"); + let bin = p.root().join("src/b.rs"); + + File::create(&lib) + .unwrap() + .write_all(b"invalid rust code") + .unwrap(); + File::create(&bin) + .unwrap() + .write_all(b"#[allow(unused)]fn foo() {}") + .unwrap(); + lib.move_into_the_past(); + + // Make sure the binary is rebuilt, not the lib + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + assert!(p.bin("foo").is_file()); +} + +#[cargo_test] +fn rebuild_sub_package_then_while_package() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies.a] + path = "a" + [dependencies.b] + path = "b" + "#, + ) + .file("src/lib.rs", "extern crate a; extern crate b;") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + authors = [] + version = "0.0.1" + [dependencies.b] + path = "../b" + "#, + ) + .file("a/src/lib.rs", "extern crate b;") + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + File::create(&p.root().join("b/src/lib.rs")) + .unwrap() + .write_all(br#"pub fn b() {}"#) + .unwrap(); + + p.cargo("build -pb").run(); + + File::create(&p.root().join("src/lib.rs")) + .unwrap() + .write_all(br#"extern crate a; extern crate b; pub fn toplevel() {}"#) + .unwrap(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn changing_lib_features_caches_targets() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [features] + foo = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build --features foo") + .with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + /* Targets should be cached from the first build */ + + p.cargo("build") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + + p.cargo("build").with_stdout("").run(); + + p.cargo("build --features foo") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); +} + +#[cargo_test] +fn changing_profiles_caches_targets() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [profile.dev] + panic = "abort" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("test") + .with_stderr( + "\ +[..]Compiling foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE] +[DOCTEST] foo +", + ) + .run(); + + /* Targets should be cached from the first build */ + + p.cargo("build") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + + p.cargo("test foo") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target[..]debug[..]deps[..]foo-[..][EXE] +", + ) + .run(); +} + +#[cargo_test] +fn changing_bin_paths_common_target_features_caches_targets() { + // Make sure dep_cache crate is built once per feature + let p = project() + .no_manifest() + .file( + ".cargo/config", + r#" + [build] + target-dir = "./target" + "#, + ) + .file( + "dep_crate/Cargo.toml", + r#" + [package] + name = "dep_crate" + version = "0.0.1" + authors = [] + + [features] + ftest = [] + "#, + ) + .file( + "dep_crate/src/lib.rs", + r#" + #[cfg(feature = "ftest")] + pub fn yo() { + println!("ftest on") + } + #[cfg(not(feature = "ftest"))] + pub fn yo() { + println!("ftest off") + } + "#, + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + dep_crate = {path = "../dep_crate", features = []} + "#, + ) + .file("a/src/lib.rs", "") + .file( + "a/src/main.rs", + r#" + extern crate dep_crate; + use dep_crate::yo; + fn main() { + yo(); + } + "#, + ) + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + dep_crate = {path = "../dep_crate", features = ["ftest"]} + "#, + ) + .file("b/src/lib.rs", "") + .file( + "b/src/main.rs", + r#" + extern crate dep_crate; + use dep_crate::yo; + fn main() { + yo(); + } + "#, + ) + .build(); + + /* Build and rebuild a/. Ensure dep_crate only builds once */ + p.cargo("run") + .cwd("a") + .with_stdout("ftest off") + .with_stderr( + "\ +[..]Compiling dep_crate v0.0.1 ([..]) +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target/debug/a[EXE]` +", + ) + .run(); + p.cargo("clean -p a").cwd("a").run(); + p.cargo("run") + .cwd("a") + .with_stdout("ftest off") + .with_stderr( + "\ +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target/debug/a[EXE]` +", + ) + .run(); + + /* Build and rebuild b/. Ensure dep_crate only builds once */ + p.cargo("run") + .cwd("b") + .with_stdout("ftest on") + .with_stderr( + "\ +[..]Compiling dep_crate v0.0.1 ([..]) +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target/debug/b[EXE]` +", + ) + .run(); + p.cargo("clean -p b").cwd("b").run(); + p.cargo("run") + .cwd("b") + .with_stdout("ftest on") + .with_stderr( + "\ +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target/debug/b[EXE]` +", + ) + .run(); + + /* Build a/ package again. If we cache different feature dep builds correctly, + * this should not cause a rebuild of dep_crate */ + p.cargo("clean -p a").cwd("a").run(); + p.cargo("run") + .cwd("a") + .with_stdout("ftest off") + .with_stderr( + "\ +[..]Compiling a v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]target/debug/a[EXE]` +", + ) + .run(); + + /* Build b/ package again. If we cache different feature dep builds correctly, + * this should not cause a rebuild */ + p.cargo("clean -p b").cwd("b").run(); + p.cargo("run") + .cwd("b") + .with_stdout("ftest on") + .with_stderr( + "\ +[..]Compiling b v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `[..]target/debug/b[EXE]` +", + ) + .run(); +} + +#[cargo_test] +fn changing_bin_features_caches_targets() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [features] + foo = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" }; + println!("{}", msg); + } + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.rename_run("foo", "off1").with_stdout("feature off").run(); + + p.cargo("build --features foo") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.rename_run("foo", "on1").with_stdout("feature on").run(); + + /* Targets should be cached from the first build */ + + p.cargo("build") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.rename_run("foo", "off2").with_stdout("feature off").run(); + + p.cargo("build --features foo") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.rename_run("foo", "on2").with_stdout("feature on").run(); +} + +#[cargo_test] +fn rebuild_tests_if_lib_changes() { + let p = project() + .file("src/lib.rs", "pub fn foo() {}") + .file( + "tests/foo.rs", + r#" + extern crate foo; + #[test] + fn test() { foo::foo(); } + "#, + ) + .build(); + + p.cargo("build").run(); + p.cargo("test").run(); + + sleep_ms(1000); + File::create(&p.root().join("src/lib.rs")).unwrap(); + + p.cargo("build -v").run(); + p.cargo("test -v") + .with_status(101) + .with_stderr_contains("[..]cannot find function `foo`[..]") + .run(); +} + +#[cargo_test] +fn no_rebuild_transitive_target_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file("src/lib.rs", "") + .file("tests/foo.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [target.foo.dependencies] + c = { path = "../c" } + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + c = { path = "../c" } + "#, + ) + .file("b/src/lib.rs", "") + .file("c/Cargo.toml", &basic_manifest("c", "0.0.1")) + .file("c/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("test --no-run") + .with_stderr( + "\ +[COMPILING] c v0.0.1 ([..]) +[COMPILING] b v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn rerun_if_changed_in_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file( + "a/build.rs", + r#" + fn main() { + println!("cargo:rerun-if-changed=build.rs"); + } + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn same_build_dir_cached_packages() { + let p = project() + .no_manifest() + .file( + "a1/Cargo.toml", + r#" + [package] + name = "a1" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#, + ) + .file("a1/src/lib.rs", "") + .file( + "a2/Cargo.toml", + r#" + [package] + name = "a2" + version = "0.0.1" + authors = [] + [dependencies] + b = { path = "../b" } + "#, + ) + .file("a2/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + [dependencies] + c = { path = "../c" } + "#, + ) + .file("b/src/lib.rs", "") + .file( + "c/Cargo.toml", + r#" + [package] + name = "c" + version = "0.0.1" + authors = [] + [dependencies] + d = { path = "../d" } + "#, + ) + .file("c/src/lib.rs", "") + .file("d/Cargo.toml", &basic_manifest("d", "0.0.1")) + .file("d/src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + target-dir = "./target" + "#, + ) + .build(); + + p.cargo("build") + .cwd("a1") + .with_stderr(&format!( + "\ +[COMPILING] d v0.0.1 ({dir}/d) +[COMPILING] c v0.0.1 ({dir}/c) +[COMPILING] b v0.0.1 ({dir}/b) +[COMPILING] a1 v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dir = p.url().to_file_path().unwrap().to_str().unwrap() + )) + .run(); + p.cargo("build") + .cwd("a2") + .with_stderr( + "\ +[COMPILING] a2 v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn no_rebuild_if_build_artifacts_move_backwards_in_time() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + p.root().move_into_the_past(); + + p.cargo("build") + .with_stdout("") + .with_stderr("[FINISHED] [..]") + .run(); +} + +#[cargo_test] +fn rebuild_if_build_artifacts_move_forward_in_time() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + p.root().move_into_the_future(); + + p.cargo("build") + .env("CARGO_LOG", "") + .with_stdout("") + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn rebuild_if_environment_changes() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + description = "old desc" + version = "0.0.1" + authors = [] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + println!("{}", env!("CARGO_PKG_DESCRIPTION")); + } + "#, + ) + .build(); + + p.cargo("run") + .with_stdout("old desc") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/foo[EXE]` +", + ) + .run(); + + File::create(&p.root().join("Cargo.toml")) + .unwrap() + .write_all( + br#" + [package] + name = "foo" + description = "new desc" + version = "0.0.1" + authors = [] + "#, + ) + .unwrap(); + + p.cargo("run") + .with_stdout("new desc") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/foo[EXE]` +", + ) + .run(); +} + +#[cargo_test] +fn no_rebuild_when_rename_dir() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", &basic_manifest("foo", "0.0.1")) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + let mut new = p.root(); + new.pop(); + new.push("bar"); + fs::rename(p.root(), &new).unwrap(); + + p.cargo("build") + .cwd(&new) + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); +} + +#[cargo_test] +fn unused_optional_dep() { + Package::new("registry1", "0.1.0").publish(); + Package::new("registry2", "0.1.0").publish(); + Package::new("registry3", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "p" + authors = [] + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + baz = { path = "baz" } + registry1 = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.1" + authors = [] + + [dev-dependencies] + registry2 = "*" + "#, + ) + .file("bar/src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.1" + authors = [] + + [dependencies] + registry3 = { version = "*", optional = true } + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn path_dev_dep_registry_updates() { + Package::new("registry1", "0.1.0").publish(); + Package::new("registry2", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "p" + authors = [] + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.1" + authors = [] + + [dependencies] + registry1 = "*" + + [dev-dependencies] + baz = { path = "../baz"} + "#, + ) + .file("bar/src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.1" + authors = [] + + [dependencies] + registry2 = "*" + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn change_panic_mode() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ['bar', 'baz'] + [profile.dev] + panic = 'abort' + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", 
&basic_manifest("bar", "0.1.1")) + .file("bar/src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.1" + authors = [] + + [lib] + proc-macro = true + + [dependencies] + bar = { path = '../bar' } + "#, + ) + .file("baz/src/lib.rs", "extern crate bar;") + .build(); + + p.cargo("build -p bar").run(); + p.cargo("build -p baz").run(); +} + +#[cargo_test] +fn dont_rebuild_based_on_plugins() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.1" + + [workspace] + members = ['baz'] + + [dependencies] + proc-macro-thing = { path = 'proc-macro-thing' } + "#, + ) + .file("src/lib.rs", "") + .file( + "proc-macro-thing/Cargo.toml", + r#" + [package] + name = "proc-macro-thing" + version = "0.1.1" + + [lib] + proc-macro = true + + [dependencies] + qux = { path = '../qux' } + "#, + ) + .file("proc-macro-thing/src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.1" + + [dependencies] + qux = { path = '../qux' } + "#, + ) + .file("baz/src/main.rs", "fn main() {}") + .file("qux/Cargo.toml", &basic_manifest("qux", "0.1.1")) + .file("qux/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("build -p baz").run(); + p.cargo("build").with_stderr("[FINISHED] [..]\n").run(); + p.cargo("build -p bar") + .with_stderr("[FINISHED] [..]\n") + .run(); +} + +#[cargo_test] +fn reuse_workspace_lib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.1" + + [workspace] + + [dependencies] + baz = { path = 'baz' } + "#, + ) + .file("src/lib.rs", "") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1")) + .file("baz/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("test -p baz -v --no-run") + .with_stderr( + "\ +[COMPILING] baz v0.1.1 ([..]) +[RUNNING] `rustc[..] --test [..]` +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn reuse_shared_build_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + shared = {path = "shared"} + + [workspace] + members = ["shared", "bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("shared/Cargo.toml", &basic_manifest("shared", "0.0.1")) + .file("shared/src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [build-dependencies] + shared = { path = "../shared" } + "#, + ) + .file("bar/src/lib.rs", "") + .file("bar/build.rs", "fn main() {}") + .build(); + + p.cargo("build --all").run(); + // This should not recompile! + p.cargo("build -p foo -v") + .with_stderr( + "\ +[FRESH] shared [..] +[FRESH] foo [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn changing_rustflags_is_cached() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("build").run(); + p.cargo("build") + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); + // This should not recompile! 
+ p.cargo("build") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + p.cargo("build") + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); +} + +#[cargo_test] +fn update_dependency_mtime_does_not_rebuild() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); + // This does not make new files, but it does update the mtime of the dependency. + p.cargo("build -p bar -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + // This should not recompile! + p.cargo("build -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); +} + +fn fingerprint_cleaner(mut dir: PathBuf, timestamp: filetime::FileTime) { + // Cargo is experimenting with letting outside projects develop some + // limited forms of GC for target_dir. This is one of the forms. + // Specifically, Cargo is updating the mtime of a file in + // target/profile/.fingerprint each time it uses the fingerprint. + // So a cleaner can remove files associated with a fingerprint + // if all the files in the fingerprint's folder are older then a time stamp without + // effecting any builds that happened since that time stamp. + let mut cleand = false; + dir.push(".fingerprint"); + for fing in fs::read_dir(&dir).unwrap() { + let fing = fing.unwrap(); + + let outdated = |f: io::Result| { + filetime::FileTime::from_last_modification_time(&f.unwrap().metadata().unwrap()) + <= timestamp + }; + if fs::read_dir(fing.path()).unwrap().all(outdated) { + fs::remove_dir_all(fing.path()).unwrap(); + println!("remove: {:?}", fing.path()); + // a real cleaner would remove the big files in deps and build as well + // but fingerprint is sufficient for our tests + cleand = true; + } else { + } + } + assert!( + cleand, + "called fingerprint_cleaner, but there was nothing to remove" + ); +} + +#[cargo_test] +fn fingerprint_cleaner_does_not_rebuild() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .run(); + p.cargo("build -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); + if is_coarse_mtime() { + sleep_ms(1000); + } + let timestamp = filetime::FileTime::from_system_time(SystemTime::now()); + if is_coarse_mtime() { + sleep_ms(1000); + } + // This does not make new files, but it does update the mtime. 
+ p.cargo("build -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + fingerprint_cleaner(p.target_debug_dir(), timestamp); + // This should not recompile! + p.cargo("build -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .env("RUSTFLAGS", "-C linker=cc") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + // But this should be cleaned and so need a rebuild + p.cargo("build -Z mtime-on-use") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn reuse_panic_build_dep_test() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [build-dependencies] + bar = { path = "bar" } + + [dev-dependencies] + bar = { path = "bar" } + + [profile.dev] + panic = "abort" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + // Check that `bar` is not built twice. It is only needed once (without `panic`). + p.cargo("test --lib --no-run -v") + .with_stderr( + "\ +[COMPILING] bar [..] +[RUNNING] `rustc --crate-name bar [..] +[COMPILING] foo [..] +[RUNNING] `rustc --crate-name build_script_build [..] +[RUNNING] [..]build-script-build` +[RUNNING] `rustc --crate-name foo src/lib.rs [..]--test[..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn reuse_panic_pm() { + // foo(panic) -> bar(panic) + // somepm(nopanic) -> bar(nopanic) + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { path = "bar" } + somepm = { path = "somepm" } + + [profile.dev] + panic = "abort" + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .file( + "somepm/Cargo.toml", + r#" + [package] + name = "somepm" + version = "0.0.1" + + [lib] + proc-macro = true + + [dependencies] + bar = { path = "../bar" } + "#, + ) + .file("somepm/src/lib.rs", "extern crate bar;") + .build(); + + // bar is built once without panic (for proc-macro) and once with (for the + // normal dependency). + p.cargo("build -v") + .with_stderr_unordered( + "\ +[COMPILING] bar [..] +[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C debuginfo=2 [..] +[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort -C debuginfo=2 [..] +[COMPILING] somepm [..] +[RUNNING] `rustc --crate-name somepm [..] +[COMPILING] foo [..] +[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C panic=abort[..] +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn bust_patched_dep() { + Package::new("registry1", "0.1.0").publish(); + Package::new("registry2", "0.1.0") + .dep("registry1", "0.1.0") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + registry2 = "0.1.0" + + [patch.crates-io] + registry1 = { path = "reg1new" } + "#, + ) + .file("src/lib.rs", "") + .file("reg1new/Cargo.toml", &basic_manifest("registry1", "0.1.0")) + .file("reg1new/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + if is_coarse_mtime() { + sleep_ms(1000); + } + + File::create(&p.root().join("reg1new/src/lib.rs")).unwrap(); + if is_coarse_mtime() { + sleep_ms(1000); + } + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] registry1 v0.1.0 ([..]) +[COMPILING] registry2 v0.1.0 +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); + + p.cargo("build -v") + .with_stderr( + "\ +[FRESH] registry1 v0.1.0 ([..]) +[FRESH] registry2 v0.1.0 +[FRESH] foo v0.0.1 ([..]) +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn rebuild_on_mid_build_file_modification() { + let server = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = server.local_addr().unwrap(); + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["root", "proc_macro_dep"] + "#, + ) + .file( + "root/Cargo.toml", + r#" + [package] + name = "root" + version = "0.1.0" + authors = [] + + [dependencies] + proc_macro_dep = { path = "../proc_macro_dep" } + "#, + ) + .file( + "root/src/lib.rs", + r#" + #[macro_use] + extern crate proc_macro_dep; + + #[derive(Noop)] + pub struct X; + "#, + ) + .file( + "proc_macro_dep/Cargo.toml", + r#" + [package] + name = "proc_macro_dep" + version = "0.1.0" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file( + "proc_macro_dep/src/lib.rs", + &format!( + r#" + extern crate proc_macro; + + use std::io::Read; + use std::net::TcpStream; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream {{ + let mut stream = TcpStream::connect("{}").unwrap(); + let mut v = Vec::new(); + stream.read_to_end(&mut v).unwrap(); + "".parse().unwrap() + }} + "#, + addr + ), + ) + .build(); + let root = p.root(); + + let t = thread::spawn(move || { + let socket = server.accept().unwrap().0; + sleep_ms(1000); + let mut file = OpenOptions::new() + .write(true) + .append(true) + .open(root.join("root/src/lib.rs")) + .unwrap(); + writeln!(file, "// modified").expect("Failed to append to root sources"); + drop(file); + drop(socket); + drop(server.accept().unwrap()); + }); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] proc_macro_dep v0.1.0 ([..]/proc_macro_dep) +[COMPILING] root v0.1.0 ([..]/root) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] root v0.1.0 ([..]/root) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + t.join().ok().unwrap(); +} + +#[cargo_test] +fn dirty_both_lib_and_test() { + // This tests that all artifacts that depend on the results of a build + // script will get rebuilt when the build script reruns, even for separate + // commands. It does the following: + // + // 1. Project "foo" has a build script which will compile a small + // staticlib to link against. Normally this would use the `cc` crate, + // but here we just use rustc to avoid the `cc` dependency. + // 2. Build the library. + // 3. Build the unit test. 
The staticlib intentionally has a bad value.
+    // 4. Rewrite the staticlib with the correct value.
+    // 5. Build the library again.
+    // 6. Build the unit test. This should recompile.
+
+    let slib = |n| {
+        format!(
+            r#"
+            #[no_mangle]
+            pub extern "C" fn doit() -> i32 {{
+                return {};
+            }}
+            "#,
+            n
+        )
+    };
+
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            extern "C" {
+                fn doit() -> i32;
+            }
+
+            #[test]
+            fn t1() {
+                assert_eq!(unsafe { doit() }, 1, "doit assert failure");
+            }
+            "#,
+        )
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+            use std::path::PathBuf;
+            use std::process::Command;
+
+            fn main() {
+                let rustc = env::var_os("RUSTC").unwrap();
+                let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
+                assert!(
+                    Command::new(rustc)
+                        .args(&[
+                            "--crate-type=staticlib",
+                            "--out-dir",
+                            out_dir.to_str().unwrap(),
+                            "slib.rs"
+                        ])
+                        .status()
+                        .unwrap()
+                        .success(),
+                    "slib build failed"
+                );
+                println!("cargo:rustc-link-lib=slib");
+                println!("cargo:rustc-link-search={}", out_dir.display());
+            }
+            "#,
+        )
+        .file("slib.rs", &slib(2))
+        .build();
+
+    p.cargo("build").run();
+
+    // 2 != 1
+    p.cargo("test --lib")
+        .with_status(101)
+        .with_stdout_contains("[..]doit assert failure[..]")
+        .run();
+
+    if is_coarse_mtime() {
+        // #5918
+        sleep_ms(1000);
+    }
+    // Fix the mistake.
+    p.change_file("slib.rs", &slib(1));
+
+    p.cargo("build").run();
+    // This should recompile with the new static lib, and the test should pass.
+    p.cargo("test --lib").run();
+}
+
+#[cargo_test]
+fn script_fails_stay_dirty() {
+    // Check that if a script is aborted (such as by hitting Ctrl-C), it will
+    // re-run on the next build.
+    // Steps:
+    // 1. Build to establish fingerprints.
+    // 2. Make a change that triggers the build script to re-run. Abort the
+    //    script while it is running.
+    // 3. Run the build again and make sure it re-runs the script.
+    let p = project()
+        .file(
+            "build.rs",
+            r#"
+            mod helper;
+            fn main() {
+                println!("cargo:rerun-if-changed=build.rs");
+                helper::doit();
+            }
+            "#,
+        )
+        .file("helper.rs", "pub fn doit() {}")
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    if is_coarse_mtime() {
+        sleep_ms(1000);
+    }
+    p.change_file("helper.rs", r#"pub fn doit() {panic!("Crash!");}"#);
+    p.cargo("build")
+        .with_stderr_contains("[..]Crash![..]")
+        .with_status(101)
+        .run();
+    // There was a bug where this second call would be "fresh".
+    p.cargo("build")
+        .with_stderr_contains("[..]Crash![..]")
+        .with_status(101)
+        .run();
+}
+
+#[cargo_test]
+fn simulated_docker_deps_stay_cached() {
+    // Test what happens in docker where the nanoseconds are zeroed out.
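+    // (What "zeroed out" means concretely: copying a cached `target` dir into
+    // an image typically round-trips every mtime through whole seconds. A
+    // minimal sketch of that truncation, assuming only the `filetime` crate
+    // already used in this file; `docker_style_mtime` is a hypothetical name,
+    // and `zeropath` below applies the same idea to real files:)
+    #[allow(dead_code)]
+    fn docker_style_mtime(mtime: filetime::FileTime) -> filetime::FileTime {
+        // Keep the seconds, drop the nanoseconds.
+        filetime::FileTime::from_unix_time(mtime.unix_seconds(), 0)
+    }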
+ Package::new("regdep", "1.0.0").publish(); + Package::new("regdep_old_style", "1.0.0") + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .publish(); + Package::new("regdep_env", "1.0.0") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-env-changed=SOMEVAR"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("regdep_rerun", "1.0.0") + .file( + "build.rs", + r#" + fn main() { + println!("cargo:rerun-if-changed=build.rs"); + } + "#, + ) + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + pathdep = { path = "pathdep" } + regdep = "1.0" + regdep_old_style = "1.0" + regdep_env = "1.0" + regdep_rerun = "1.0" + "#, + ) + .file( + "src/lib.rs", + " + extern crate pathdep; + extern crate regdep; + extern crate regdep_old_style; + extern crate regdep_env; + extern crate regdep_rerun; + ", + ) + .file("build.rs", "fn main() {}") + .file("pathdep/Cargo.toml", &basic_manifest("pathdep", "1.0.0")) + .file("pathdep/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + let already_zero = { + // This happens on HFS with 1-second timestamp resolution, + // or other filesystems where it just so happens to write exactly on a + // 1-second boundary. + let metadata = fs::metadata(p.root().join("src/lib.rs")).unwrap(); + let mtime = FileTime::from_last_modification_time(&metadata); + mtime.nanoseconds() == 0 + }; + + // Recursively remove `nanoseconds` from every path. + fn zeropath(path: &Path) { + for entry in walkdir::WalkDir::new(path) + .into_iter() + .filter_map(|e| e.ok()) + { + let metadata = fs::metadata(entry.path()).unwrap(); + let mtime = metadata.modified().unwrap(); + let mtime_duration = mtime.duration_since(SystemTime::UNIX_EPOCH).unwrap(); + let trunc_mtime = FileTime::from_unix_time(mtime_duration.as_secs() as i64, 0); + let atime = metadata.accessed().unwrap(); + let atime_duration = atime.duration_since(SystemTime::UNIX_EPOCH).unwrap(); + let trunc_atime = FileTime::from_unix_time(atime_duration.as_secs() as i64, 0); + if let Err(e) = filetime::set_file_times(entry.path(), trunc_atime, trunc_mtime) { + // Windows doesn't allow changing filetimes on some things + // (directories, other random things I'm not sure why). Just + // ignore them. + if e.kind() == std::io::ErrorKind::PermissionDenied { + println!("PermissionDenied filetime on {:?}", entry.path()); + } else { + panic!("FileTime error on {:?}: {:?}", entry.path(), e); + } + } + } + } + zeropath(&p.root()); + zeropath(&paths::home()); + + if already_zero { + println!("already zero"); + // If it was already truncated, then everything stays fresh. + p.cargo("build -v") + .with_stderr_unordered( + "\ +[FRESH] pathdep [..] +[FRESH] regdep [..] +[FRESH] regdep_env [..] +[FRESH] regdep_old_style [..] +[FRESH] regdep_rerun [..] +[FRESH] foo [..] +[FINISHED] [..] +", + ) + .run(); + } else { + println!("not already zero"); + // It is not ideal that `foo` gets recompiled, but that is the current + // behavior. Currently mtimes are ignored for registry deps. + // + // Note that this behavior is due to the fact that `foo` has a build + // script in "old" mode where it doesn't print `rerun-if-*`. In this + // mode we use `Precalculated` to fingerprint a path dependency, where + // `Precalculated` is an opaque string which has the most recent mtime + // in it. It differs between builds because one has nsec=0 and the other + // likely has a nonzero nsec. Hence, the rebuild. 
+ p.cargo("build -v") + .with_stderr_unordered( + "\ +[FRESH] pathdep [..] +[FRESH] regdep [..] +[FRESH] regdep_env [..] +[FRESH] regdep_old_style [..] +[FRESH] regdep_rerun [..] +[COMPILING] foo [..] +[RUNNING] [..]/foo-[..]/build-script-build[..] +[RUNNING] `rustc --crate-name foo[..] +[FINISHED] [..] +", + ) + .run(); + } +} + +#[cargo_test] +fn metadata_change_invalidates() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + for attr in &[ + "authors = [\"foo\"]", + "description = \"desc\"", + "homepage = \"https://example.com\"", + "repository =\"https://example.com\"", + ] { + let mut file = OpenOptions::new() + .write(true) + .append(true) + .open(p.root().join("Cargo.toml")) + .unwrap(); + writeln!(file, "{}", attr).unwrap(); + p.cargo("build") + .with_stderr_contains("[COMPILING] foo [..]") + .run(); + } + p.cargo("build -v") + .with_stderr_contains("[FRESH] foo[..]") + .run(); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1); +} + +#[cargo_test] +fn edition_change_invalidates() { + const MANIFEST: &str = r#" + [package] + name = "foo" + version = "0.1.0" + "#; + let p = project() + .file("Cargo.toml", MANIFEST) + .file("src/lib.rs", "") + .build(); + p.cargo("build").run(); + p.change_file("Cargo.toml", &format!("{}edition = \"2018\"", MANIFEST)); + p.cargo("build") + .with_stderr_contains("[COMPILING] foo [..]") + .run(); + p.change_file( + "Cargo.toml", + &format!( + r#"{}edition = "2018" + [lib] + edition = "2015" + "#, + MANIFEST + ), + ); + p.cargo("build") + .with_stderr_contains("[COMPILING] foo [..]") + .run(); + p.cargo("build -v") + .with_stderr_contains("[FRESH] foo[..]") + .run(); + assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1); +} + +#[cargo_test] +fn rename_with_path_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = 'a' } + "#, + ) + .file("src/lib.rs", "extern crate a; pub fn foo() { a::foo(); }") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + + [dependencies] + b = { path = 'b' } + "#, + ) + .file("a/src/lib.rs", "extern crate b; pub fn foo() { b::foo() }") + .file( + "a/b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + "#, + ) + .file("a/b/src/lib.rs", "pub fn foo() { }"); + let p = p.build(); + + p.cargo("build").run(); + + // Now rename the root directory and rerun `cargo run`. Not only should we + // not build anything but we also shouldn't crash. 
+ let mut new = p.root(); + new.pop(); + new.push("foo2"); + + fs::rename(p.root(), &new).unwrap(); + + p.cargo("build") + .cwd(&new) + .with_stderr("[FINISHED] [..]") + .run(); +} + +#[cargo_test] +fn move_target_directory_with_path_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = { path = "a" } + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + "#, + ) + .file("src/lib.rs", "extern crate a; pub use a::print_msg;") + .file( + "a/build.rs", + r###" + use std::env; + use std::fs; + use std::path::Path; + + fn main() { + println!("cargo:rerun-if-changed=build.rs"); + let out_dir = env::var("OUT_DIR").unwrap(); + let dest_path = Path::new(&out_dir).join("hello.rs"); + fs::write(&dest_path, r#" + pub fn message() -> &'static str { + "Hello, World!" + } + "#).unwrap(); + } + "###, + ) + .file( + "a/src/lib.rs", + r#" + include!(concat!(env!("OUT_DIR"), "/hello.rs")); + pub fn print_msg() { message(); } + "#, + ); + let p = p.build(); + + let mut parent = p.root(); + parent.pop(); + + p.cargo("build").run(); + + let new_target = p.root().join("target2"); + fs::rename(p.root().join("target"), &new_target).unwrap(); + + p.cargo("build") + .env("CARGO_TARGET_DIR", &new_target) + .with_stderr("[FINISHED] [..]") + .run(); +} diff --git a/tests/testsuite/generate_lockfile.rs b/tests/testsuite/generate_lockfile.rs new file mode 100644 index 00000000000..0df2c091694 --- /dev/null +++ b/tests/testsuite/generate_lockfile.rs @@ -0,0 +1,237 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use crate::support::registry::Package; +use crate::support::{basic_manifest, paths, project, ProjectBuilder}; + +#[cargo_test] +fn adding_and_removing_packages() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("generate-lockfile").run(); + + let toml = p.root().join("Cargo.toml"); + let lock1 = p.read_lockfile(); + + // add a dep + File::create(&toml) + .unwrap() + .write_all( + br#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies.bar] + path = "bar" + "#, + ) + .unwrap(); + p.cargo("generate-lockfile").run(); + let lock2 = p.read_lockfile(); + assert_ne!(lock1, lock2); + + // change the dep + File::create(&p.root().join("bar/Cargo.toml")) + .unwrap() + .write_all(basic_manifest("bar", "0.0.2").as_bytes()) + .unwrap(); + p.cargo("generate-lockfile").run(); + let lock3 = p.read_lockfile(); + assert_ne!(lock1, lock3); + assert_ne!(lock2, lock3); + + // remove the dep + println!("lock4"); + File::create(&toml) + .unwrap() + .write_all( + br#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + "#, + ) + .unwrap(); + p.cargo("generate-lockfile").run(); + let lock4 = p.read_lockfile(); + assert_eq!(lock1, lock4); +} + +#[cargo_test] +fn no_index_update() { + Package::new("serde", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies] + serde = "1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("generate-lockfile") + .with_stderr("[UPDATING] `[..]` index") + .run(); + + p.cargo("generate-lockfile -Zno-index-update") + .masquerade_as_nightly_cargo() + .with_stdout("") + .with_stderr("") + .run(); +} + +#[cargo_test] +fn preserve_metadata() { + let p = project() + 
.file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("generate-lockfile").run(); + + let metadata = r#" +[metadata] +bar = "baz" +foo = "bar" +"#; + let lockfile = p.root().join("Cargo.lock"); + let lock = p.read_lockfile(); + let data = lock + metadata; + File::create(&lockfile) + .unwrap() + .write_all(data.as_bytes()) + .unwrap(); + + // Build and make sure the metadata is still there + p.cargo("build").run(); + let lock = p.read_lockfile(); + assert!(lock.contains(metadata.trim()), "{}", lock); + + // Update and make sure the metadata is still there + p.cargo("update").run(); + let lock = p.read_lockfile(); + assert!(lock.contains(metadata.trim()), "{}", lock); +} + +#[cargo_test] +fn preserve_line_endings_issue_2076() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + let lockfile = p.root().join("Cargo.lock"); + p.cargo("generate-lockfile").run(); + assert!(lockfile.is_file()); + p.cargo("generate-lockfile").run(); + + let lock0 = p.read_lockfile(); + + assert!(lock0.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n")); + + let lock1 = lock0.replace("\n", "\r\n"); + { + File::create(&lockfile) + .unwrap() + .write_all(lock1.as_bytes()) + .unwrap(); + } + + p.cargo("generate-lockfile").run(); + + let lock2 = p.read_lockfile(); + + assert!(lock2.starts_with("# This file is automatically @generated by Cargo.\r\n# It is not intended for manual editing.\r\n")); + assert_eq!(lock1, lock2); +} + +#[cargo_test] +fn cargo_update_generate_lockfile() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + let lockfile = p.root().join("Cargo.lock"); + assert!(!lockfile.is_file()); + p.cargo("update").with_stdout("").run(); + assert!(lockfile.is_file()); + + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + + assert!(!lockfile.is_file()); + p.cargo("update").with_stdout("").run(); + assert!(lockfile.is_file()); +} + +#[cargo_test] +fn duplicate_entries_in_lockfile() { + let _a = ProjectBuilder::new(paths::root().join("a")) + .file( + "Cargo.toml", + r#" + [package] + name = "a" + authors = [] + version = "0.0.1" + + [dependencies] + common = {path="common"} + "#, + ) + .file("src/lib.rs", "") + .build(); + + let common_toml = &basic_manifest("common", "0.0.1"); + + let _common_in_a = ProjectBuilder::new(paths::root().join("a/common")) + .file("Cargo.toml", common_toml) + .file("src/lib.rs", "") + .build(); + + let b = ProjectBuilder::new(paths::root().join("b")) + .file( + "Cargo.toml", + r#" + [package] + name = "b" + authors = [] + version = "0.0.1" + + [dependencies] + common = {path="common"} + a = {path="../a"} + "#, + ) + .file("src/lib.rs", "") + .build(); + + let _common_in_b = ProjectBuilder::new(paths::root().join("b/common")) + .file("Cargo.toml", common_toml) + .file("src/lib.rs", "") + .build(); + + // should fail due to a duplicate package `common` in the lock file + b.cargo("build") + .with_status(101) + .with_stderr_contains( + "[..]package collision in the lockfile: packages common [..] and \ + common [..] 
are different, but only one can be written to \ + lockfile unambiguously", + ) + .run(); +} diff --git a/tests/testsuite/git.rs b/tests/testsuite/git.rs new file mode 100644 index 00000000000..9762ea9c95c --- /dev/null +++ b/tests/testsuite/git.rs @@ -0,0 +1,2745 @@ +use git2; +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::net::{TcpListener, TcpStream}; +use std::path::Path; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; +use std::thread; + +use crate::support::paths::{self, CargoPathExt}; +use crate::support::sleep_ms; +use crate::support::Project; +use crate::support::{basic_lib_manifest, basic_manifest, git, main_file, path2url, project}; + +fn disable_git_cli() -> bool { + // mingw git on Windows does not support Windows-style file URIs. + // Appveyor in the rust repo has that git up front in the PATH instead + // of Git-for-Windows, which causes this to fail. + env::var("CARGO_TEST_DISABLE_GIT_CLI") == Ok("1".to_string()) +} + +#[cargo_test] +fn cargo_compile_simple_git_dep() { + let project = project(); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_lib_manifest("dep1")) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }) + .unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let git_root = git_project.root(); + + project + .cargo("build") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(&git_root), + path2url(&git_root), + )) + .run(); + + assert!(project.bin("foo").is_file()); + + project + .process(&project.bin("foo")) + .with_stdout("hello world\n") + .run(); +} + +#[cargo_test] +fn cargo_compile_git_dep_branch() { + let project = project(); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_lib_manifest("dep1")) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }) + .unwrap(); + + // Make a new branch based on the current HEAD commit + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + let head = repo.find_commit(head).unwrap(); + repo.branch("branchy", &head, true).unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + branch = "branchy" + + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let git_root = git_project.root(); + + project + .cargo("build") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(&git_root), + path2url(&git_root), + )) + .run(); + + assert!(project.bin("foo").is_file()); + + project + .process(&project.bin("foo")) + .with_stdout("hello world\n") + .run(); +} + +#[cargo_test] +fn cargo_compile_git_dep_tag() { + let project = 
project(); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_lib_manifest("dep1")) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }) + .unwrap(); + + // Make a tag corresponding to the current HEAD + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + repo.tag( + "v0.1.0", + &repo.find_object(head, None).unwrap(), + &repo.signature().unwrap(), + "make a new tag", + false, + ) + .unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + tag = "v0.1.0" + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let git_root = git_project.root(); + + project + .cargo("build") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + path2url(&git_root), + path2url(&git_root), + )) + .run(); + + assert!(project.bin("foo").is_file()); + + project + .process(&project.bin("foo")) + .with_stdout("hello world\n") + .run(); + + project.cargo("build").run(); +} + +#[cargo_test] +fn cargo_compile_with_nested_paths() { + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + + [dependencies.dep2] + + version = "0.5.0" + path = "vendor/dep2" + + [lib] + + name = "dep1" + "#, + ) + .file( + "src/dep1.rs", + r#" + extern crate dep2; + + pub fn hello() -> &'static str { + dep2::hello() + } + "#, + ) + .file("vendor/dep2/Cargo.toml", &basic_lib_manifest("dep2")) + .file( + "vendor/dep2/src/dep2.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "foo" + "#, + git_project.url() + ), + ) + .file( + "src/foo.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("hello world\n").run(); +} + +#[cargo_test] +fn cargo_compile_with_malformed_nested_paths() { + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_lib_manifest("dep1")) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + .file("vendor/dep2/Cargo.toml", "!INVALID!") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "foo" + "#, + git_project.url() + ), + ) + .file( + "src/foo.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("hello world\n").run(); +} + +#[cargo_test] +fn cargo_compile_with_meta_package() { + let git_project = git::new("meta-dep", |project| { + project + .file("dep1/Cargo.toml", 
&basic_lib_manifest("dep1")) + .file( + "dep1/src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "this is dep1" + } + "#, + ) + .file("dep2/Cargo.toml", &basic_lib_manifest("dep2")) + .file( + "dep2/src/dep2.rs", + r#" + pub fn hello() -> &'static str { + "this is dep2" + } + "#, + ) + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + version = "0.5.0" + git = '{}' + + [dependencies.dep2] + + version = "0.5.0" + git = '{}' + + [[bin]] + + name = "foo" + "#, + git_project.url(), + git_project.url() + ), + ) + .file( + "src/foo.rs", + &main_file( + r#""{} {}", dep1::hello(), dep2::hello()"#, + &["dep1", "dep2"], + ), + ) + .build(); + + p.cargo("build").run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")) + .with_stdout("this is dep1 this is dep2\n") + .run(); +} + +#[cargo_test] +fn cargo_compile_with_short_ssh_git() { + let url = "git@github.com:a/dep"; + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep] + + git = "{}" + + [[bin]] + + name = "foo" + "#, + url + ), + ) + .file( + "src/foo.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stdout("") + .with_stderr(&format!( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + invalid url `{}`: relative URL without a base +", + url + )) + .run(); +} + +#[cargo_test] +fn two_revs_same_deps() { + let bar = git::new("meta-dep", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.0.0")) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }) + .unwrap(); + + let repo = git2::Repository::open(&bar.root()).unwrap(); + let rev1 = repo.revparse_single("HEAD").unwrap().id(); + + // Commit the changes and make sure we trigger a recompile + File::create(&bar.root().join("src/lib.rs")) + .unwrap() + .write_all(br#"pub fn bar() -> i32 { 2 }"#) + .unwrap(); + git::add(&repo); + let rev2 = git::commit(&repo); + + let foo = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + rev = "{}" + + [dependencies.baz] + path = "../baz" + "#, + bar.url(), + rev1 + ), + ) + .file( + "src/main.rs", + r#" + extern crate bar; + extern crate baz; + + fn main() { + assert_eq!(bar::bar(), 1); + assert_eq!(baz::baz(), 2); + } + "#, + ) + .build(); + + let _baz = project() + .at("baz") + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "baz" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + rev = "{}" + "#, + bar.url(), + rev2 + ), + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn baz() -> i32 { bar::bar() } + "#, + ) + .build(); + + foo.cargo("build -v").run(); + assert!(foo.bin("foo").is_file()); + foo.process(&foo.bin("foo")).run(); +} + +#[cargo_test] +fn recompilation() { + let git_project = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_lib_manifest("bar")) + .file("src/bar.rs", "pub fn bar() {}") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + git = '{}' + "#, + git_project.url() + ), + ) + .file("src/main.rs", &main_file(r#""{:?}", bar::bar()"#, &["bar"])) 
+ .build(); + + // First time around we should compile both foo and bar + p.cargo("build") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + git_project.url(), + git_project.url(), + )) + .run(); + + // Don't recompile the second time + p.cargo("build").with_stdout("").run(); + + // Modify a file manually, shouldn't trigger a recompile + File::create(&git_project.root().join("src/bar.rs")) + .unwrap() + .write_all(br#"pub fn bar() { println!("hello!"); }"#) + .unwrap(); + + p.cargo("build").with_stdout("").run(); + + p.cargo("update") + .with_stderr(&format!( + "[UPDATING] git repository `{}`", + git_project.url() + )) + .run(); + + p.cargo("build").with_stdout("").run(); + + // Commit the changes and make sure we don't trigger a recompile because the + // lock file says not to change + let repo = git2::Repository::open(&git_project.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + println!("compile after commit"); + p.cargo("build").with_stdout("").run(); + p.root().move_into_the_past(); + + // Update the dependency and carry on! + p.cargo("update") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ + ", + git_project.url() + )) + .run(); + println!("going for the last compile"); + p.cargo("build") + .with_stderr(&format!( + "[COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + git_project.url(), + )) + .run(); + + // Make sure clean only cleans one dep + p.cargo("clean -p foo").with_stdout("").run(); + p.cargo("build") + .with_stderr( + "[COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]", + ) + .run(); +} + +#[cargo_test] +fn update_with_shared_deps() { + let git_project = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_lib_manifest("bar")) + .file("src/bar.rs", "pub fn bar() {}") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + path = "dep1" + [dependencies.dep2] + path = "dep2" + "#, + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate dep1; + #[allow(unused_extern_crates)] + extern crate dep2; + fn main() {} + "#, + ) + .file( + "dep1/Cargo.toml", + &format!( + r#" + [package] + name = "dep1" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + version = "0.5.0" + git = '{}' + "#, + git_project.url() + ), + ) + .file("dep1/src/lib.rs", "") + .file( + "dep2/Cargo.toml", + &format!( + r#" + [package] + name = "dep2" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + version = "0.5.0" + git = '{}' + "#, + git_project.url() + ), + ) + .file("dep2/src/lib.rs", "") + .build(); + + // First time around we should compile both foo and bar + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{git}` +[COMPILING] bar v0.5.0 ({git}#[..]) +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] 
v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + git = git_project.url(), + )) + .run(); + + // Modify a file manually, and commit it + File::create(&git_project.root().join("src/bar.rs")) + .unwrap() + .write_all(br#"pub fn bar() { println!("hello!"); }"#) + .unwrap(); + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let old_head = repo.head().unwrap().target().unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + + // By default, not transitive updates + println!("dep1 update"); + p.cargo("update -p dep1").with_stdout("").run(); + + // Don't do anything bad on a weird --precise argument + println!("bar bad precise update"); + p.cargo("update -p bar --precise 0.1.2") + .with_status(101) + .with_stderr( + "\ +[UPDATING] git repository [..] +[ERROR] Unable to update [..] + +Caused by: + revspec '0.1.2' not found; [..] +", + ) + .run(); + + // Specifying a precise rev to the old rev shouldn't actually update + // anything because we already have the rev in the db. + println!("bar precise update"); + p.cargo("update -p bar --precise") + .arg(&old_head.to_string()) + .with_stdout("") + .run(); + + // Updating aggressively should, however, update the repo. + println!("dep1 aggressive update"); + p.cargo("update -p dep1 --aggressive") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ + ", + git_project.url() + )) + .run(); + + // Make sure we still only compile one version of the git repo + println!("build"); + p.cargo("build") + .with_stderr(&format!( + "\ +[COMPILING] bar v0.5.0 ({git}#[..]) +[COMPILING] [..] v0.5.0 ([CWD][..]dep[..]) +[COMPILING] [..] v0.5.0 ([CWD][..]dep[..]) +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + git = git_project.url(), + )) + .run(); + + // We should be able to update transitive deps + p.cargo("update -p bar") + .with_stderr(&format!( + "[UPDATING] git repository `{}`", + git_project.url() + )) + .run(); +} + +#[cargo_test] +fn dep_with_submodule() { + let project = project(); + let git_project = git::new("dep1", |project| { + project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + }) + .unwrap(); + let git_project2 = + git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/lib.rs", + "extern crate dep1; pub fn foo() { dep1::dep() }", + ) + .build(); + + project + .cargo("build") + .with_stderr( + "\ +[UPDATING] git repository [..] +[COMPILING] dep1 [..] +[COMPILING] foo [..] 
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn dep_with_bad_submodule() { + let project = project(); + let git_project = git::new("dep1", |project| { + project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + }) + .unwrap(); + let git_project2 = + git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")).unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + + // now amend the first commit on git_project2 to make submodule ref point to not-found + // commit + let repo = git2::Repository::open(&git_project2.root()).unwrap(); + let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap(); + let commit = repo.find_commit(original_submodule_ref).unwrap(); + commit + .amend( + Some("refs/heads/master"), + None, + None, + None, + Some("something something"), + None, + ) + .unwrap(); + + let p = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/lib.rs", + "extern crate dep1; pub fn foo() { dep1::dep() }", + ) + .build(); + + let expected = format!( + "\ +[UPDATING] git repository [..] +[ERROR] failed to load source for a dependency on `dep1` + +Caused by: + Unable to update {} + +Caused by: + failed to update submodule `src` + +Caused by: + object not found - no match for id [..] +", + path2url(git_project.root()) + ); + + p.cargo("build") + .with_stderr(expected) + .with_status(101) + .run(); +} + +#[cargo_test] +fn two_deps_only_update_one() { + let project = project(); + let git1 = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + let git2 = git::new("dep2", |project| { + project + .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + git = '{}' + [dependencies.dep2] + git = '{}' + "#, + git1.url(), + git2.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + fn oid_to_short_sha(oid: git2::Oid) -> String { + oid.to_string()[..8].to_string() + } + fn git_repo_head_sha(p: &Project) -> String { + let repo = git2::Repository::open(p.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + oid_to_short_sha(head) + } + + println!("dep1 head sha: {}", git_repo_head_sha(&git1)); + println!("dep2 head sha: {}", git_repo_head_sha(&git2)); + + p.cargo("build") + .with_stderr( + "[UPDATING] git repository `[..]`\n\ + [UPDATING] git repository `[..]`\n\ + [COMPILING] [..] v0.5.0 ([..])\n\ + [COMPILING] [..] 
v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + ) + .run(); + + File::create(&git1.root().join("src/lib.rs")) + .unwrap() + .write_all(br#"pub fn foo() {}"#) + .unwrap(); + let repo = git2::Repository::open(&git1.root()).unwrap(); + git::add(&repo); + let oid = git::commit(&repo); + println!("dep1 head sha: {}", oid_to_short_sha(oid)); + + p.cargo("update -p dep1") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ + ", + git1.url() + )) + .run(); +} + +#[cargo_test] +fn stale_cached_version() { + let bar = git::new("meta-dep", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.0.0")) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }) + .unwrap(); + + // Update the git database in the cache with the current state of the git + // repo + let foo = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file( + "src/main.rs", + r#" + extern crate bar; + + fn main() { assert_eq!(bar::bar(), 1) } + "#, + ) + .build(); + + foo.cargo("build").run(); + foo.process(&foo.bin("foo")).run(); + + // Update the repo, and simulate someone else updating the lock file and then + // us pulling it down. + File::create(&bar.root().join("src/lib.rs")) + .unwrap() + .write_all(br#"pub fn bar() -> i32 { 1 + 0 }"#) + .unwrap(); + let repo = git2::Repository::open(&bar.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + + let rev = repo.revparse_single("HEAD").unwrap().id(); + + File::create(&foo.root().join("Cargo.lock")) + .unwrap() + .write_all( + format!( + r#" + [[package]] + name = "foo" + version = "0.0.0" + dependencies = [ + 'bar 0.0.0 (git+{url}#{hash})' + ] + + [[package]] + name = "bar" + version = "0.0.0" + source = 'git+{url}#{hash}' + "#, + url = bar.url(), + hash = rev + ) + .as_bytes(), + ) + .unwrap(); + + // Now build! + foo.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] bar v0.0.0 ({bar}#[..]) +[COMPILING] foo v0.0.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + bar = bar.url(), + )) + .run(); + foo.process(&foo.bin("foo")).run(); +} + +#[cargo_test] +fn dep_with_changed_submodule() { + let project = project(); + let git_project = git::new("dep1", |project| { + project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + }) + .unwrap(); + + let git_project2 = git::new("dep2", |project| { + project.file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }") + }) + .unwrap(); + + let git_project3 = git::new("dep3", |project| { + project.file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }") + }) + .unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src")); + git::commit(&repo); + + let p = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + [dependencies.dep1] + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + " + extern crate dep1; + pub fn main() { println!(\"{}\", dep1::dep()) } + ", + ) + .build(); + + println!("first run"); + p.cargo("run") + .with_stderr( + "[UPDATING] git repository `[..]`\n\ + [COMPILING] dep1 v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n\ + [RUNNING] `target/debug/foo[EXE]`\n", + ) + .with_stdout("project2\n") + .run(); + + File::create(&git_project.root().join(".gitmodules")) + .unwrap() + .write_all( + format!( + "[submodule \"src\"]\n\tpath = src\n\turl={}", + git_project3.url() + ) + .as_bytes(), + ) + .unwrap(); + + // Sync the submodule and reset it to the new remote. + sub.sync().unwrap(); + { + let subrepo = sub.open().unwrap(); + subrepo + .remote_add_fetch("origin", "refs/heads/*:refs/heads/*") + .unwrap(); + subrepo + .remote_set_url("origin", &git_project3.url().to_string()) + .unwrap(); + let mut origin = subrepo.find_remote("origin").unwrap(); + origin.fetch(&[], None, None).unwrap(); + let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap(); + let obj = subrepo.find_object(id, None).unwrap(); + subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap(); + } + sub.add_to_index(true).unwrap(); + git::add(&repo); + git::commit(&repo); + + sleep_ms(1000); + // Update the dependency and carry on! 
+ println!("update"); + p.cargo("update -v") + .with_stderr("") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ + ", + git_project.url() + )) + .run(); + + println!("last run"); + p.cargo("run") + .with_stderr( + "[COMPILING] dep1 v0.5.0 ([..])\n\ + [COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n\ + [RUNNING] `target/debug/foo[EXE]`\n", + ) + .with_stdout("project3\n") + .run(); +} + +#[cargo_test] +fn dev_deps_with_testing() { + let p2 = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> &'static str { "zoidberg" } + "#, + ) + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + version = "0.5.0" + git = '{}' + "#, + p2.url() + ), + ) + .file( + "src/main.rs", + r#" + fn main() {} + + #[cfg(test)] + mod tests { + extern crate bar; + #[test] fn foo() { bar::gimme(); } + } + "#, + ) + .build(); + + // Generate a lock file which did not use `bar` to compile, but had to update + // `bar` to generate the lock file + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + bar = p2.url() + )) + .run(); + + // Make sure we use the previous resolution of `bar` instead of updating it + // a second time. + p.cargo("test") + .with_stderr( + "\ +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] v0.5.0 ([..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test tests::foo ... ok") + .run(); +} + +#[cargo_test] +fn git_build_cmd_freshness() { + let foo = git::new("foo", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + .file(".gitignore", "src/bar.rs") + }) + .unwrap(); + foo.root().move_into_the_past(); + + sleep_ms(1000); + + foo.cargo("build") + .with_stderr( + "\ +[COMPILING] foo v0.0.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + + // Smoke test to make sure it doesn't compile again + println!("first pass"); + foo.cargo("build").with_stdout("").run(); + + // Modify an ignored file and make sure we don't rebuild + println!("second pass"); + File::create(&foo.root().join("src/bar.rs")).unwrap(); + foo.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn git_name_not_always_needed() { + let p2 = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> &'static str { "zoidberg" } + "#, + ) + }) + .unwrap(); + + let repo = git2::Repository::open(&p2.root()).unwrap(); + let mut cfg = repo.config().unwrap(); + let _ = cfg.remove("user.name"); + let _ = cfg.remove("user.email"); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.bar] + git = '{}' + "#, + p2.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // Generate a lock file which did not use `bar` to compile, but had to update + // `bar` to generate the lock file + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + bar = p2.url() + )) + .run(); +} + +#[cargo_test] +fn git_repo_changing_no_rebuild() { + let bar = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }) + .unwrap(); + + // Lock p1 to the first rev in the git repo + let p1 = project() + .at("p1") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "p1" + version = "0.5.0" + authors = [] + build = 'build.rs' + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .file("build.rs", "fn main() {}") + .build(); + p1.root().move_into_the_past(); + p1.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + bar = bar.url() + )) + .run(); + + // Make a commit to lock p2 to a different rev + File::create(&bar.root().join("src/lib.rs")) + .unwrap() + .write_all(br#"pub fn bar() -> i32 { 2 }"#) + .unwrap(); + let repo = git2::Repository::open(&bar.root()).unwrap(); + git::add(&repo); + git::commit(&repo); + + // Lock p2 to the second rev + let p2 = project() + .at("p2") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "p2" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + p2.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{bar}` +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + bar = bar.url() + )) + .run(); + + // And now for the real test! Make sure that p1 doesn't get rebuilt + // even though the git repo has changed. 
+ p1.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn git_dep_build_cmd() { + let p = git::new("foo", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + + [[bin]] + + name = "foo" + "#, + ) + .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [lib] + name = "bar" + path = "src/bar.rs" + "#, + ) + .file( + "bar/src/bar.rs.in", + r#" + pub fn gimme() -> i32 { 0 } + "#, + ) + .file( + "bar/build.rs", + r#" + use std::fs; + fn main() { + fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); + } + "#, + ) + }) + .unwrap(); + + p.root().join("bar").move_into_the_past(); + + p.cargo("build").run(); + + p.process(&p.bin("foo")).with_stdout("0\n").run(); + + // Touching bar.rs.in should cause the `build` command to run again. + fs::File::create(&p.root().join("bar/src/bar.rs.in")) + .unwrap() + .write_all(b"pub fn gimme() -> i32 { 1 }") + .unwrap(); + + p.cargo("build").run(); + + p.process(&p.bin("foo")).with_stdout("1\n").run(); +} + +#[cargo_test] +fn fetch_downloads() { + let bar = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("fetch") + .with_stderr(&format!( + "[UPDATING] git repository `{url}`", + url = bar.url() + )) + .run(); + + p.cargo("fetch").with_stdout("").run(); +} + +#[cargo_test] +fn warnings_in_git_dep() { + let bar = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "fn unused() {}") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + "#, + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + [COMPILING] bar v0.5.0 ({}#[..])\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", + bar.url(), + bar.url(), + )) + .run(); +} + +#[cargo_test] +fn update_ambiguous() { + let bar1 = git::new("bar1", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + let bar2 = git::new("bar2", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.6.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + let baz = git::new("baz", |project| { + project + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "baz" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + git = '{}' + "#, + bar2.url() + ), + ) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + [dependencies.baz] + git = '{}' + "#, + bar1.url(), + baz.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("generate-lockfile").run(); + 
p.cargo("update -p bar") + .with_status(101) + .with_stderr( + "\ +[ERROR] There are multiple `bar` packages in your project, and the specification `bar` \ +is ambiguous. +Please re-run this command with `-p ` where `` is one of the \ +following: + bar:0.[..].0 + bar:0.[..].0 +", + ) + .run(); +} + +#[cargo_test] +fn update_one_dep_in_repo_with_many_deps() { + let bar = git::new("bar", |project| { + project + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) + .file("a/src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.bar] + git = '{}' + [dependencies.a] + git = '{}' + "#, + bar.url(), + bar.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("generate-lockfile").run(); + p.cargo("update -p bar") + .with_stderr(&format!("[UPDATING] git repository `{}`", bar.url())) + .run(); +} + +#[cargo_test] +fn switch_deps_does_not_update_transitive() { + let transitive = git::new("transitive", |project| { + project + .file("Cargo.toml", &basic_manifest("transitive", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + let dep1 = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "dep" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.transitive] + git = '{}' + "#, + transitive.url() + ), + ) + .file("src/lib.rs", "") + }) + .unwrap(); + let dep2 = git::new("dep2", |project| { + project + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "dep" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.transitive] + git = '{}' + "#, + transitive.url() + ), + ) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, + dep1.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{}` +[UPDATING] git repository `{}` +[COMPILING] transitive [..] +[COMPILING] dep [..] +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + dep1.url(), + transitive.url() + )) + .run(); + + // Update the dependency to point to the second repository, but this + // shouldn't update the transitive dependency which is the same. + File::create(&p.root().join("Cargo.toml")) + .unwrap() + .write_all( + format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, + dep2.url() + ) + .as_bytes(), + ) + .unwrap(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] git repository `{}` +[COMPILING] dep [..] +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + dep2.url() + )) + .run(); +} + +#[cargo_test] +fn update_one_source_updates_all_packages_in_that_git_source() { + let dep = git::new("dep", |project| { + project + .file( + "Cargo.toml", + r#" + [package] + name = "dep" + version = "0.5.0" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) + .file("a/src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.dep] + git = '{}' + "#, + dep.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); + + let repo = git2::Repository::open(&dep.root()).unwrap(); + let rev1 = repo.revparse_single("HEAD").unwrap().id(); + + // Just be sure to change a file + File::create(&dep.root().join("src/lib.rs")) + .unwrap() + .write_all(br#"pub fn bar() -> i32 { 2 }"#) + .unwrap(); + git::add(&repo); + git::commit(&repo); + + p.cargo("update -p dep").run(); + let mut lockfile = String::new(); + File::open(&p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lockfile) + .unwrap(); + assert!( + !lockfile.contains(&rev1.to_string()), + "{} in {}", + rev1, + lockfile + ); +} + +#[cargo_test] +fn switch_sources() { + let a1 = git::new("a1", |project| { + project + .file("Cargo.toml", &basic_manifest("a", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + let a2 = git::new("a2", |project| { + project + .file("Cargo.toml", &basic_manifest("a", "0.5.1")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies.b] + path = "b" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "b/Cargo.toml", + &format!( + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies.a] + git = '{}' + "#, + a1.url() + ), + ) + .file("b/src/lib.rs", "pub fn main() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] git repository `file://[..]a1` +[COMPILING] a v0.5.0 ([..]a1#[..] +[COMPILING] b v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + File::create(&p.root().join("b/Cargo.toml")) + .unwrap() + .write_all( + format!( + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies.a] + git = '{}' + "#, + a2.url() + ) + .as_bytes(), + ) + .unwrap(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] git repository `file://[..]a2` +[COMPILING] a v0.5.1 ([..]a2#[..] +[COMPILING] b v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn dont_require_submodules_are_checked_out() { + let p = project().build(); + let git1 = git::new("dep1", |p| { + p.file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("a/foo", "") + }) + .unwrap(); + let git2 = git::new("dep2", |p| p).unwrap(); + + let repo = git2::Repository::open(&git1.root()).unwrap(); + let url = path2url(git2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("a/submodule")); + git::commit(&repo); + + git2::Repository::init(&p.root()).unwrap(); + let url = path2url(git1.root()).to_string(); + let dst = paths::home().join("foo"); + git2::Repository::clone(&url, &dst).unwrap(); + + git1.cargo("build -v").cwd(&dst).run(); +} + +#[cargo_test] +fn doctest_same_name() { + let a2 = git::new("a2", |p| { + p.file("Cargo.toml", &basic_manifest("a", "0.5.0")) + .file("src/lib.rs", "pub fn a2() {}") + }) + .unwrap(); + + let a1 = git::new("a1", |p| { + p.file( + "Cargo.toml", + &format!( + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + [dependencies] + a = {{ git = '{}' }} + "#, + a2.url() + ), + ) + .file("src/lib.rs", "extern crate a; pub fn a1() {}") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, + a1.url() + ), + ) + .file( + "src/lib.rs", + r#" + #[macro_use] + extern crate a; + "#, + ) + .build(); + + p.cargo("test -v").run(); +} + +#[cargo_test] +fn lints_are_suppressed() { + let a = git::new("a", |p| { + p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file( + "src/lib.rs", + " + use std::option; + ", + ) + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] git repository `[..]` +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn denied_lints_are_allowed() { + let a = git::new("a", |p| { + p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file( + "src/lib.rs", + " + #![deny(warnings)] + use std::option; + ", + ) + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ git = '{}' }} + "#, + a.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] git repository `[..]` +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn add_a_git_dep() { + let git = git::new("git", |p| { + p.file("Cargo.toml", &basic_manifest("git", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = {{ path = 'a' }} + git = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + File::create(p.root().join("a/Cargo.toml")) + .unwrap() + .write_all( + format!( + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + git = {{ git = '{}' }} + "#, + git.url() + ) + .as_bytes(), + ) + .unwrap(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn two_at_rev_instead_of_tag() { + let git = git::new("git", |p| { + p.file("Cargo.toml", &basic_manifest("git1", "0.5.0")) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("git2", "0.5.0")) + .file("a/src/lib.rs", "") + }) + .unwrap(); + + // Make a tag corresponding to the current HEAD + let repo = git2::Repository::open(&git.root()).unwrap(); + let head = repo.head().unwrap().target().unwrap(); + repo.tag( + "v0.1.0", + &repo.find_object(head, None).unwrap(), + &repo.signature().unwrap(), + "make a new tag", + false, + ) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + git1 = {{ git = '{0}', rev = 'v0.1.0' }} + git2 = {{ git = '{0}', rev = 'v0.1.0' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("generate-lockfile").run(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn include_overrides_gitignore() { + // Make sure that `package.include` takes precedence over .gitignore. + let p = git::new("foo", |repo| { + repo.file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + include = ["src/lib.rs", "ignored.txt", "Cargo.toml"] + "#, + ) + .file( + ".gitignore", + r#" + /target + Cargo.lock + ignored.txt + "#, + ) + .file("src/lib.rs", "") + .file("ignored.txt", "") + .file("build.rs", "fn main() {}") + }) + .unwrap(); + + p.cargo("build").run(); + p.change_file("ignored.txt", "Trigger rebuild."); + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([..]) +[RUNNING] `[..]build-script-build[..]` +[RUNNING] `rustc --crate-name foo src/lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + p.cargo("package --list --allow-dirty") + .with_stdout( + "\ +Cargo.toml +ignored.txt +src/lib.rs +", + ) + .run(); +} + +#[cargo_test] +fn invalid_git_dependency_manifest() { + let project = project(); + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + + name = "dep1" + version = "0.5.0" + authors = ["carlhuda@example.com"] + categories = ["algorithms"] + categories = ["algorithms"] + + [lib] + + name = "dep1" + "#, + ) + .file( + "src/dep1.rs", + r#" + pub fn hello() -> &'static str { + "hello world" + } + "#, + ) + }) + .unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.dep1] + + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""{}", dep1::hello()"#, &["dep1"]), + ) + .build(); + + let git_root = git_project.root(); + + project + .cargo("build") + .with_status(101) + .with_stderr(&format!( + "[UPDATING] git repository `{}`\n\ + error: failed to load source for a dependency on `dep1`\n\ + \n\ + Caused by:\n \ + Unable to update {}\n\ + \n\ + Caused by:\n \ + failed to parse manifest at `[..]`\n\ + \n\ + Caused by:\n \ + could not parse input as TOML\n\ + \n\ + Caused by:\n \ + duplicate key: `categories` for key `project`", + path2url(&git_root), + path2url(&git_root), + )) + .run(); +} + +#[cargo_test] +fn failed_submodule_checkout() { + let project = project(); + let git_project = git::new("dep1", |project| { + project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + }) + .unwrap(); + + let git_project2 = git::new("dep2", |project| project.file("lib.rs", "")).unwrap(); + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = listener.local_addr().unwrap(); + let done = Arc::new(AtomicBool::new(false)); + let done2 = done.clone(); + + let t = thread::spawn(move || { + while !done2.load(Ordering::SeqCst) { + if let Ok((mut socket, _)) = listener.accept() { + drop(socket.write_all(b"foo\r\n")); + } + } + }); + + let repo = git2::Repository::open(&git_project2.root()).unwrap(); + let url = format!("https://{}:{}/", addr.ip(), addr.port()); + { + let mut s = repo.submodule(&url, Path::new("bar"), false).unwrap(); + let subrepo = s.open().unwrap(); + let mut cfg = subrepo.config().unwrap(); + cfg.set_str("user.email", "foo@bar.com").unwrap(); + cfg.set_str("user.name", "Foo Bar").unwrap(); + git::commit(&subrepo); + s.add_finalize().unwrap(); + } + git::commit(&repo); + drop((repo, url)); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let url = path2url(git_project2.root()).to_string(); + git::add_submodule(&repo, &url, Path::new("src")); + git::commit(&repo); + drop(repo); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + dep1 = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + project + .cargo("build") + .with_status(101) + .with_stderr_contains(" failed to update submodule `src`") + .with_stderr_contains(" failed to update submodule `bar`") + .run(); + project + .cargo("build") + .with_status(101) + .with_stderr_contains(" failed to update submodule `src`") + .with_stderr_contains(" failed to update submodule `bar`") + .run(); + + done.store(true, Ordering::SeqCst); + drop(TcpStream::connect(&addr)); + t.join().unwrap(); +} + +#[cargo_test] +fn use_the_cli() { 
+ if disable_git_cli() { + return; + } + let project = project(); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let project = project + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + dep1 = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + " + [net] + git-fetch-with-cli = true + ", + ) + .build(); + + let stderr = "\ +[UPDATING] git repository `[..]` +[RUNNING] `git fetch [..]` +[COMPILING] dep1 [..] +[RUNNING] `rustc [..]` +[COMPILING] foo [..] +[RUNNING] `rustc [..]` +[FINISHED] [..] +"; + + project.cargo("build -v").with_stderr(stderr).run(); +} + +#[cargo_test] +fn templatedir_doesnt_cause_problems() { + let git_project2 = git::new("dep2", |project| { + project + .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + dep1 = {{ git = '{}' }} + "#, + git_project.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + File::create(paths::home().join(".gitconfig")) + .unwrap() + .write_all( + format!( + r#" + [init] + templatedir = {} + "#, + git_project2 + .url() + .to_file_path() + .unwrap() + .to_str() + .unwrap() + .replace("\\", "/") + ) + .as_bytes(), + ) + .unwrap(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn git_with_cli_force() { + if disable_git_cli() { + return; + } + // Supports a force-pushed repo. + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_lib_manifest("dep1")) + .file("src/lib.rs", r#"pub fn f() { println!("one"); }"#) + }) + .unwrap(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.1" + edition = "2018" + + [dependencies] + dep1 = {{ git = "{}" }} + "#, + git_project.url() + ), + ) + .file("src/main.rs", "fn main() { dep1::f(); }") + .file( + ".cargo/config", + " + [net] + git-fetch-with-cli = true + ", + ) + .build(); + p.cargo("build").run(); + p.rename_run("foo", "foo1").with_stdout("one").run(); + + // commit --amend a change that will require a force fetch. + let repo = git2::Repository::open(&git_project.root()).unwrap(); + git_project.change_file("src/lib.rs", r#"pub fn f() { println!("two"); }"#); + git::add(&repo); + let id = repo.refname_to_id("HEAD").unwrap(); + let commit = repo.find_commit(id).unwrap(); + let tree_id = t!(t!(repo.index()).write_tree()); + t!(commit.amend( + Some("HEAD"), + None, + None, + None, + None, + Some(&t!(repo.find_tree(tree_id))) + )); + // Perform the fetch. + p.cargo("update").run(); + p.cargo("build").run(); + p.rename_run("foo", "foo2").with_stdout("two").run(); +} + +#[cargo_test] +fn git_fetch_cli_env_clean() { + if disable_git_cli() { + return; + } + // This tests that git-fetch-with-cli works when GIT_DIR environment + // variable is set (for whatever reason). 
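+    // Cargo is expected to scrub `GIT_DIR` from the environment of the
+    // spawned `git` process; otherwise the fetch would operate on whatever
+    // repository the variable happens to point at.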
+ let git_dep = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let git_proj = git::new("foo", |project| { + project + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + [dependencies] + dep1 = {{ git = '{}' }} + "#, + git_dep.url() + ), + ) + .file("src/lib.rs", "pub extern crate dep1;") + .file( + ".cargo/config", + " + [net] + git-fetch-with-cli = true + ", + ) + }) + .unwrap(); + + // The directory set here isn't too important. Pointing to our own git + // directory causes git to be confused and fail. Can also point to an + // empty directory, or a nonexistent one. + git_proj + .cargo("fetch") + .env("GIT_DIR", git_proj.root().join(".git")) + .run(); +} diff --git a/tests/testsuite/init.rs b/tests/testsuite/init.rs new file mode 100644 index 00000000000..84e0b9d3691 --- /dev/null +++ b/tests/testsuite/init.rs @@ -0,0 +1,632 @@ +use crate::support; +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::process::Command; + +use crate::support::{paths, Execs}; + +fn cargo_process(s: &str) -> Execs { + let mut execs = support::cargo_process(s); + execs.cwd(&paths::root()).env("HOME", &paths::home()); + execs +} + +fn mercurial_available() -> bool { + let result = Command::new("hg") + .arg("--version") + .output() + .map(|o| o.status.success()) + .unwrap_or(false); + if !result { + println!("`hg` not available, skipping test"); + } + result +} + +#[cargo_test] +fn simple_lib() { + cargo_process("init --lib --vcs none --edition 2015") + .env("USER", "foo") + .with_stderr("[CREATED] library package") + .run(); + + assert!(paths::root().join("Cargo.toml").is_file()); + assert!(paths::root().join("src/lib.rs").is_file()); + assert!(!paths::root().join(".gitignore").is_file()); + + cargo_process("build").run(); +} + +#[cargo_test] +fn simple_bin() { + let path = paths::root().join("foo"); + fs::create_dir(&path).unwrap(); + cargo_process("init --bin --vcs none --edition 2015") + .env("USER", "foo") + .cwd(&path) + .with_stderr("[CREATED] binary (application) package") + .run(); + + assert!(paths::root().join("foo/Cargo.toml").is_file()); + assert!(paths::root().join("foo/src/main.rs").is_file()); + + cargo_process("build").cwd(&path).run(); + assert!(paths::root() + .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)) + .is_file()); +} + +#[cargo_test] +fn simple_git_ignore_exists() { + // write a .gitignore file with one entry + fs::create_dir_all(paths::root().join("foo")).unwrap(); + fs::write( + paths::root().join("foo/.gitignore"), + "/target\n**/some.file", + ) + .unwrap(); + + cargo_process("init --lib foo --edition 2015") + .env("USER", "foo") + .run(); + + assert!(paths::root().is_dir()); + assert!(paths::root().join("foo/Cargo.toml").is_file()); + assert!(paths::root().join("foo/src/lib.rs").is_file()); + assert!(paths::root().join("foo/.git").is_dir()); + assert!(paths::root().join("foo/.gitignore").is_file()); + + let fp = paths::root().join("foo/.gitignore"); + let mut contents = String::new(); + File::open(&fp) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert_eq!( + contents, + "/target\n\ + **/some.file\n\n\ + #Added by cargo\n\ + #\n\ + #already existing elements are commented out\n\ + \n\ + #/target\n\ + **/*.rs.bk\n\ + Cargo.lock\n", + ); + + cargo_process("build").cwd(&paths::root().join("foo")).run(); +} + +#[cargo_test] +fn both_lib_and_bin() { + cargo_process("init --lib --bin") + 
.env("USER", "foo") + .with_status(101) + .with_stderr("[ERROR] can't specify both lib and binary outputs") + .run(); +} + +fn bin_already_exists(explicit: bool, rellocation: &str) { + let path = paths::root().join("foo"); + fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path = path.join(rellocation); + + let content = br#" + fn main() { + println!("Hello, world 2!"); + } + "#; + + File::create(&sourcefile_path) + .unwrap() + .write_all(content) + .unwrap(); + + if explicit { + cargo_process("init --bin --vcs none") + .env("USER", "foo") + .cwd(&path) + .run(); + } else { + cargo_process("init --vcs none") + .env("USER", "foo") + .cwd(&path) + .run(); + } + + assert!(paths::root().join("foo/Cargo.toml").is_file()); + assert!(!paths::root().join("foo/src/lib.rs").is_file()); + + // Check that our file is not overwritten + let mut new_content = Vec::new(); + File::open(&sourcefile_path) + .unwrap() + .read_to_end(&mut new_content) + .unwrap(); + assert_eq!(Vec::from(content as &[u8]), new_content); +} + +#[cargo_test] +fn bin_already_exists_explicit() { + bin_already_exists(true, "src/main.rs") +} + +#[cargo_test] +fn bin_already_exists_implicit() { + bin_already_exists(false, "src/main.rs") +} + +#[cargo_test] +fn bin_already_exists_explicit_nosrc() { + bin_already_exists(true, "main.rs") +} + +#[cargo_test] +fn bin_already_exists_implicit_nosrc() { + bin_already_exists(false, "main.rs") +} + +#[cargo_test] +fn bin_already_exists_implicit_namenosrc() { + bin_already_exists(false, "foo.rs") +} + +#[cargo_test] +fn bin_already_exists_implicit_namesrc() { + bin_already_exists(false, "src/foo.rs") +} + +#[cargo_test] +fn confused_by_multiple_lib_files() { + let path = paths::root().join("foo"); + fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path1 = path.join("src/lib.rs"); + + File::create(&sourcefile_path1) + .unwrap() + .write_all(br#"fn qqq () { println!("Hello, world 2!"); }"#) + .unwrap(); + + let sourcefile_path2 = path.join("lib.rs"); + + File::create(&sourcefile_path2) + .unwrap() + .write_all(br#" fn qqq () { println!("Hello, world 3!"); }"#) + .unwrap(); + + cargo_process("init --vcs none").env("USER", "foo").cwd(&path).with_status(101).with_stderr( + "[ERROR] cannot have a package with multiple libraries, found both `src/lib.rs` and `lib.rs`", + ) + .run(); + + assert!(!paths::root().join("foo/Cargo.toml").is_file()); +} + +#[cargo_test] +fn multibin_project_name_clash() { + let path = paths::root().join("foo"); + fs::create_dir(&path).unwrap(); + + let sourcefile_path1 = path.join("foo.rs"); + + File::create(&sourcefile_path1) + .unwrap() + .write_all(br#"fn main () { println!("Hello, world 2!"); }"#) + .unwrap(); + + let sourcefile_path2 = path.join("main.rs"); + + File::create(&sourcefile_path2) + .unwrap() + .write_all(br#"fn main () { println!("Hello, world 3!"); }"#) + .unwrap(); + + cargo_process("init --lib --vcs none") + .env("USER", "foo") + .cwd(&path) + .with_status(101) + .with_stderr( + "\ +[ERROR] multiple possible binary sources found: + main.rs + foo.rs +cannot automatically generate Cargo.toml as the main target would be ambiguous +", + ) + .run(); + + assert!(!paths::root().join("foo/Cargo.toml").is_file()); +} + +fn lib_already_exists(rellocation: &str) { + let path = paths::root().join("foo"); + fs::create_dir_all(&path.join("src")).unwrap(); + + let sourcefile_path = path.join(rellocation); + + let content = br#" + pub fn qqq() {} + "#; + + File::create(&sourcefile_path) + .unwrap() + .write_all(content) + .unwrap(); + + 
cargo_process("init --vcs none") + .env("USER", "foo") + .cwd(&path) + .run(); + + assert!(paths::root().join("foo/Cargo.toml").is_file()); + assert!(!paths::root().join("foo/src/main.rs").is_file()); + + // Check that our file is not overwritten + let mut new_content = Vec::new(); + File::open(&sourcefile_path) + .unwrap() + .read_to_end(&mut new_content) + .unwrap(); + assert_eq!(Vec::from(content as &[u8]), new_content); +} + +#[cargo_test] +fn lib_already_exists_src() { + lib_already_exists("src/lib.rs"); +} + +#[cargo_test] +fn lib_already_exists_nosrc() { + lib_already_exists("lib.rs"); +} + +#[cargo_test] +fn simple_git() { + cargo_process("init --lib --vcs git") + .env("USER", "foo") + .run(); + + assert!(paths::root().join("Cargo.toml").is_file()); + assert!(paths::root().join("src/lib.rs").is_file()); + assert!(paths::root().join(".git").is_dir()); + assert!(paths::root().join(".gitignore").is_file()); +} + +#[cargo_test] +fn auto_git() { + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join("Cargo.toml").is_file()); + assert!(paths::root().join("src/lib.rs").is_file()); + assert!(paths::root().join(".git").is_dir()); + assert!(paths::root().join(".gitignore").is_file()); +} + +#[cargo_test] +fn invalid_dir_name() { + let foo = &paths::root().join("foo.bar"); + fs::create_dir_all(&foo).unwrap(); + cargo_process("init") + .cwd(foo.clone()) + .env("USER", "foo") + .with_status(101) + .with_stderr( + "\ +[ERROR] Invalid character `.` in crate name: `foo.bar` +use --name to override crate name +", + ) + .run(); + + assert!(!foo.join("Cargo.toml").is_file()); +} + +#[cargo_test] +fn reserved_name() { + let test = &paths::root().join("test"); + fs::create_dir_all(&test).unwrap(); + cargo_process("init") + .cwd(test.clone()) + .env("USER", "foo") + .with_status(101) + .with_stderr( + "\ +[ERROR] The name `test` cannot be used as a crate name\n\ +use --name to override crate name +", + ) + .run(); + + assert!(!test.join("Cargo.toml").is_file()); +} + +#[cargo_test] +fn git_autodetect() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join("Cargo.toml").is_file()); + assert!(paths::root().join("src/lib.rs").is_file()); + assert!(paths::root().join(".git").is_dir()); + assert!(paths::root().join(".gitignore").is_file()); +} + +#[cargo_test] +fn mercurial_autodetect() { + fs::create_dir(&paths::root().join(".hg")).unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join("Cargo.toml").is_file()); + assert!(paths::root().join("src/lib.rs").is_file()); + assert!(!paths::root().join(".git").is_dir()); + assert!(paths::root().join(".hgignore").is_file()); +} + +#[cargo_test] +fn gitignore_appended_not_replaced() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join(".gitignore")) + .unwrap() + .write_all(b"qqqqqq\n") + .unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join("Cargo.toml").is_file()); + assert!(paths::root().join("src/lib.rs").is_file()); + assert!(paths::root().join(".git").is_dir()); + assert!(paths::root().join(".gitignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"qqqqqq"#)); +} + +#[cargo_test] +fn gitignore_added_newline_in_existing() { + 
fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join(".gitignore")) + .unwrap() + .write_all(b"first") + .unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join(".gitignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.starts_with("first\n")); +} + +#[cargo_test] +fn gitignore_no_newline_in_new() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join(".gitignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(!contents.starts_with('\n')); +} + +#[cargo_test] +fn mercurial_added_newline_in_existing() { + fs::create_dir(&paths::root().join(".hg")).unwrap(); + + File::create(&paths::root().join(".hgignore")) + .unwrap() + .write_all(b"first") + .unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join(".hgignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".hgignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.starts_with("first\n")); +} + +#[cargo_test] +fn mercurial_no_newline_in_new() { + fs::create_dir(&paths::root().join(".hg")).unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + assert!(paths::root().join(".hgignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".hgignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(!contents.starts_with('\n')); +} + +#[cargo_test] +fn terminating_newline_in_new_git_ignore() { + cargo_process("init --vcs git --lib") + .env("USER", "foo") + .run(); + + let content = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); + + let mut last_chars = content.chars().rev(); + assert_eq!(last_chars.next(), Some('\n')); + assert_ne!(last_chars.next(), Some('\n')); +} + +#[cargo_test] +fn terminating_newline_in_new_mercurial_ignore() { + if !mercurial_available() { + return; + } + cargo_process("init --vcs hg --lib") + .env("USER", "foo") + .run(); + + let content = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); + + let mut last_chars = content.chars().rev(); + assert_eq!(last_chars.next(), Some('\n')); + assert_ne!(last_chars.next(), Some('\n')); +} + +#[cargo_test] +fn terminating_newline_in_existing_git_ignore() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + fs::write(&paths::root().join(".gitignore"), b"first").unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + let content = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); + + let mut last_chars = content.chars().rev(); + assert_eq!(last_chars.next(), Some('\n')); + assert_ne!(last_chars.next(), Some('\n')); +} + +#[cargo_test] +fn terminating_newline_in_existing_mercurial_ignore() { + fs::create_dir(&paths::root().join(".hg")).unwrap(); + fs::write(&paths::root().join(".hgignore"), b"first").unwrap(); + + cargo_process("init --lib").env("USER", "foo").run(); + + let content = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); + + let mut last_chars = content.chars().rev(); + assert_eq!(last_chars.next(), Some('\n')); + assert_ne!(last_chars.next(), Some('\n')); +} + +#[cargo_test] +fn 
cargo_lock_gitignored_if_lib1() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + cargo_process("init --lib --vcs git") + .env("USER", "foo") + .run(); + + assert!(paths::root().join(".gitignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"Cargo.lock"#)); +} + +#[cargo_test] +fn cargo_lock_gitignored_if_lib2() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join("lib.rs")) + .unwrap() + .write_all(br#""#) + .unwrap(); + + cargo_process("init --vcs git").env("USER", "foo").run(); + + assert!(paths::root().join(".gitignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"Cargo.lock"#)); +} + +#[cargo_test] +fn cargo_lock_not_gitignored_if_bin1() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + cargo_process("init --vcs git --bin") + .env("USER", "foo") + .run(); + + assert!(paths::root().join(".gitignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(!contents.contains(r#"Cargo.lock"#)); +} + +#[cargo_test] +fn cargo_lock_not_gitignored_if_bin2() { + fs::create_dir(&paths::root().join(".git")).unwrap(); + + File::create(&paths::root().join("main.rs")) + .unwrap() + .write_all(br#""#) + .unwrap(); + + cargo_process("init --vcs git").env("USER", "foo").run(); + + assert!(paths::root().join(".gitignore").is_file()); + + let mut contents = String::new(); + File::open(&paths::root().join(".gitignore")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(!contents.contains(r#"Cargo.lock"#)); +} + +#[cargo_test] +fn with_argument() { + cargo_process("init foo --vcs none") + .env("USER", "foo") + .run(); + assert!(paths::root().join("foo/Cargo.toml").is_file()); +} + +#[cargo_test] +fn unknown_flags() { + cargo_process("init foo --flag") + .with_status(1) + .with_stderr_contains( + "error: Found argument '--flag' which wasn't expected, or isn't valid in this context", + ) + .run(); +} + +#[cfg(not(windows))] +#[cargo_test] +fn no_filename() { + cargo_process("init /") + .with_status(101) + .with_stderr( + "[ERROR] cannot auto-detect package name from path \"/\" ; use --name to override" + .to_string(), + ) + .run(); +} diff --git a/tests/testsuite/install.rs b/tests/testsuite/install.rs new file mode 100644 index 00000000000..5ebb3018829 --- /dev/null +++ b/tests/testsuite/install.rs @@ -0,0 +1,1453 @@ +use std::fs::{self, File, OpenOptions}; +use std::io::prelude::*; + +use git2; + +use crate::support; +use crate::support::cross_compile; +use crate::support::git; +use crate::support::install::{assert_has_installed_exe, assert_has_not_installed_exe, cargo_home}; +use crate::support::paths; +use crate::support::registry::Package; +use crate::support::{basic_manifest, cargo_process, project}; + +fn pkg(name: &str, vers: &str) { + Package::new(name, vers) + .file("src/lib.rs", "") + .file( + "src/main.rs", + &format!("extern crate {}; fn main() {{}}", name), + ) + .publish(); +} + +#[cargo_test] +fn simple() { + pkg("foo", "0.0.1"); + + cargo_process("install foo") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... 
+[DOWNLOADED] foo v0.0.1 (registry [..]) +[INSTALLING] foo v0.0.1 +[COMPILING] foo v0.0.1 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] +[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); + assert_has_installed_exe(cargo_home(), "foo"); + + cargo_process("uninstall foo") + .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]") + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn multiple_pkgs() { + pkg("foo", "0.0.1"); + pkg("bar", "0.0.2"); + + cargo_process("install foo bar baz") + .with_status(101) + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] foo v0.0.1 (registry `[CWD]/registry`) +[INSTALLING] foo v0.0.1 +[COMPILING] foo v0.0.1 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] +[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`) +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.2 (registry `[CWD]/registry`) +[INSTALLING] bar v0.0.2 +[COMPILING] bar v0.0.2 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE] +[INSTALLED] package `bar v0.0.2` (executable `bar[EXE]`) +[ERROR] could not find `baz` in registry `[..]` +[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above). +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +[ERROR] some crates failed to install +", + ) + .run(); + assert_has_installed_exe(cargo_home(), "foo"); + assert_has_installed_exe(cargo_home(), "bar"); + + cargo_process("uninstall foo bar") + .with_stderr( + "\ +[REMOVING] [CWD]/home/.cargo/bin/foo[EXE] +[REMOVING] [CWD]/home/.cargo/bin/bar[EXE] +[SUMMARY] Successfully uninstalled foo, bar! +", + ) + .run(); + + assert_has_not_installed_exe(cargo_home(), "foo"); + assert_has_not_installed_exe(cargo_home(), "bar"); +} + +#[cargo_test] +fn pick_max_version() { + pkg("foo", "0.1.0"); + pkg("foo", "0.2.0"); + pkg("foo", "0.2.1"); + pkg("foo", "0.2.1-pre.1"); + pkg("foo", "0.3.0-pre.2"); + + cargo_process("install foo") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] foo v0.2.1 (registry [..]) +[INSTALLING] foo v0.2.1 +[COMPILING] foo v0.2.1 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] +[INSTALLED] package `foo v0.2.1` (executable `foo[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn installs_beta_version_by_explicit_name_from_git() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", &basic_manifest("foo", "0.3.0-beta.1")) + .file("src/main.rs", "fn main() {}") + .build(); + + cargo_process("install --git") + .arg(p.url().to_string()) + .arg("foo") + .run(); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn missing() { + pkg("foo", "0.0.1"); + cargo_process("install bar") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +[ERROR] could not find `bar` in registry `[..]` +", + ) + .run(); +} + +#[cargo_test] +fn bad_version() { + pkg("foo", "0.0.1"); + cargo_process("install foo --vers=0.2.0") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] 
index +[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0` +", + ) + .run(); +} + +#[cargo_test] +fn bad_paths() { + cargo_process("install") + .with_status(101) + .with_stderr("[ERROR] `[CWD]` is not a crate root; specify a crate to install [..]") + .run(); + + cargo_process("install --path .") + .with_status(101) + .with_stderr("[ERROR] `[CWD]` does not contain a Cargo.toml file[..]") + .run(); + + let toml = paths::root().join("Cargo.toml"); + fs::write(toml, "").unwrap(); + cargo_process("install --path Cargo.toml") + .with_status(101) + .with_stderr("[ERROR] `[CWD]/Cargo.toml` is not a directory[..]") + .run(); + + cargo_process("install --path .") + .with_status(101) + .with_stderr_contains("[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`") + .run(); +} + +#[cargo_test] +fn install_location_precedence() { + pkg("foo", "0.0.1"); + + let root = paths::root(); + let t1 = root.join("t1"); + let t2 = root.join("t2"); + let t3 = root.join("t3"); + let t4 = cargo_home(); + + fs::create_dir(root.join(".cargo")).unwrap(); + File::create(root.join(".cargo/config")) + .unwrap() + .write_all( + format!( + "[install] + root = '{}' + ", + t3.display() + ) + .as_bytes(), + ) + .unwrap(); + + println!("install --root"); + + cargo_process("install foo --root") + .arg(&t1) + .env("CARGO_INSTALL_ROOT", &t2) + .run(); + assert_has_installed_exe(&t1, "foo"); + assert_has_not_installed_exe(&t2, "foo"); + + println!("install CARGO_INSTALL_ROOT"); + + cargo_process("install foo") + .env("CARGO_INSTALL_ROOT", &t2) + .run(); + assert_has_installed_exe(&t2, "foo"); + assert_has_not_installed_exe(&t3, "foo"); + + println!("install install.root"); + + cargo_process("install foo").run(); + assert_has_installed_exe(&t3, "foo"); + assert_has_not_installed_exe(&t4, "foo"); + + fs::remove_file(root.join(".cargo/config")).unwrap(); + + println!("install cargo home"); + + cargo_process("install foo").run(); + assert_has_installed_exe(&t4, "foo"); +} + +#[cargo_test] +fn install_path() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + cargo_process("install --path").arg(p.root()).run(); + assert_has_installed_exe(cargo_home(), "foo"); + p.cargo("install --path .") + .with_status(101) + .with_stderr( + "\ +[ERROR] binary `foo[..]` already exists in destination as part of `foo v0.0.1 [..]` +Add --force to overwrite +", + ) + .run(); +} + +#[cargo_test] +fn multiple_crates_error() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("a/src/main.rs", "fn main() {}") + .build(); + + cargo_process("install --git") + .arg(p.url().to_string()) + .with_status(101) + .with_stderr( + "\ +[UPDATING] git repository [..] 
+[ERROR] multiple packages with binaries found: bar, foo +", + ) + .run(); +} + +#[cargo_test] +fn multiple_crates_select() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("a/src/main.rs", "fn main() {}") + .build(); + + cargo_process("install --git") + .arg(p.url().to_string()) + .arg("foo") + .run(); + assert_has_installed_exe(cargo_home(), "foo"); + assert_has_not_installed_exe(cargo_home(), "bar"); + + cargo_process("install --git") + .arg(p.url().to_string()) + .arg("bar") + .run(); + assert_has_installed_exe(cargo_home(), "bar"); +} + +#[cargo_test] +fn multiple_crates_git_all() { + let p = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#"\ +[workspace] +members = ["bin1", "bin2"] +"#, + ) + .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0")) + .file("bin2/Cargo.toml", &basic_manifest("bin2", "0.1.0")) + .file( + "bin1/src/main.rs", + r#"fn main() { println!("Hello, world!"); }"#, + ) + .file( + "bin2/src/main.rs", + r#"fn main() { println!("Hello, world!"); }"#, + ) + .build(); + + cargo_process(&format!("install --git {} bin1 bin2", p.url().to_string())).run(); +} + +#[cargo_test] +fn multiple_crates_auto_binaries() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() {}") + .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("a/src/lib.rs", "") + .build(); + + cargo_process("install --path").arg(p.root()).run(); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn multiple_crates_auto_examples() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file( + "examples/foo.rs", + " + extern crate bar; + extern crate foo; + fn main() {} + ", + ) + .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("a/src/lib.rs", "") + .build(); + + cargo_process("install --path") + .arg(p.root()) + .arg("--example=foo") + .run(); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn no_binaries_or_examples() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("a/src/lib.rs", "") + .build(); + + cargo_process("install --path") + .arg(p.root()) + .with_status(101) + .with_stderr("[ERROR] no packages found with binaries or examples") + .run(); +} + +#[cargo_test] +fn no_binaries() { + let p = project() + .file("src/lib.rs", "") + .file("examples/foo.rs", "fn main() {}") + .build(); + + cargo_process("install --path") + .arg(p.root()) + .arg("foo") + .with_status(101) + .with_stderr( + "\ +[ERROR] specified package `foo v0.0.1 ([..])` has no binaries +", + ) + .run(); +} + +#[cargo_test] +fn examples() { + let p = project() + .file("src/lib.rs", "") + .file("examples/foo.rs", "extern crate foo; fn main() {}") + .build(); + + cargo_process("install --path") + .arg(p.root()) + .arg("--example=foo") + .run(); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn install_twice() { + let p = project() + 
.file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + cargo_process("install --path").arg(p.root()).run(); + cargo_process("install --path") + .arg(p.root()) + .with_status(101) + .with_stderr( + "\ +[ERROR] binary `foo-bin1[..]` already exists in destination as part of `foo v0.0.1 ([..])` +binary `foo-bin2[..]` already exists in destination as part of `foo v0.0.1 ([..])` +Add --force to overwrite +", + ) + .run(); +} + +#[cargo_test] +fn install_force() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + cargo_process("install --path").arg(p.root()).run(); + + let p = project() + .at("foo2") + .file("Cargo.toml", &basic_manifest("foo", "0.2.0")) + .file("src/main.rs", "fn main() {}") + .build(); + + cargo_process("install --force --path") + .arg(p.root()) + .with_stderr( + "\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] [CWD]/home/.cargo/bin/foo[EXE] +[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); + + cargo_process("install --list") + .with_stdout( + "\ +foo v0.2.0 ([..]): + foo[..] +", + ) + .run(); +} + +#[cargo_test] +fn install_force_partial_overlap() { + let p = project() + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + cargo_process("install --path").arg(p.root()).run(); + + let p = project() + .at("foo2") + .file("Cargo.toml", &basic_manifest("foo", "0.2.0")) + .file("src/bin/foo-bin2.rs", "fn main() {}") + .file("src/bin/foo-bin3.rs", "fn main() {}") + .build(); + + cargo_process("install --force --path") + .arg(p.root()) + .with_stderr( + "\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [CWD]/home/.cargo/bin/foo-bin3[EXE] +[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE] +[INSTALLED] package `foo v0.2.0 ([..]/foo2)` (executable `foo-bin3[EXE]`) +[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); + + cargo_process("install --list") + .with_stdout( + "\ +foo v0.0.1 ([..]): + foo-bin1[..] +foo v0.2.0 ([..]): + foo-bin2[..] + foo-bin3[..] +", + ) + .run(); +} + +#[cargo_test] +fn install_force_bin() { + let p = project() + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + cargo_process("install --path").arg(p.root()).run(); + + let p = project() + .at("foo2") + .file("Cargo.toml", &basic_manifest("foo", "0.2.0")) + .file("src/bin/foo-bin1.rs", "fn main() {}") + .file("src/bin/foo-bin2.rs", "fn main() {}") + .build(); + + cargo_process("install --force --bin foo-bin2 --path") + .arg(p.root()) + .with_stderr( + "\ +[INSTALLING] foo v0.2.0 ([..]) +[COMPILING] foo v0.2.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE] +[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); + + cargo_process("install --list") + .with_stdout( + "\ +foo v0.0.1 ([..]): + foo-bin1[..] +foo v0.2.0 ([..]): + foo-bin2[..] 
+", + ) + .run(); +} + +#[cargo_test] +fn compile_failure() { + let p = project().file("src/main.rs", "").build(); + + cargo_process("install --path") + .arg(p.root()) + .with_status(101) + .with_stderr_contains( + "\ +[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be \ + found at `[..]target` + +Caused by: + Could not compile `foo`. + +To learn more, run the command again with --verbose. +", + ) + .run(); +} + +#[cargo_test] +fn git_repo() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "fn main() {}") + .build(); + + // Use `--locked` to test that we don't even try to write a lock file. + cargo_process("install --locked --git") + .arg(p.url().to_string()) + .with_stderr( + "\ +[UPDATING] git repository `[..]` +[INSTALLING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] +[INSTALLED] package `foo v0.1.0 ([..]/foo#[..])` (executable `foo[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); + assert_has_installed_exe(cargo_home(), "foo"); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn list() { + pkg("foo", "0.0.1"); + pkg("bar", "0.2.1"); + pkg("bar", "0.2.2"); + + cargo_process("install --list").with_stdout("").run(); + + cargo_process("install bar --vers =0.2.1").run(); + cargo_process("install foo").run(); + cargo_process("install --list") + .with_stdout( + "\ +bar v0.2.1: + bar[..] +foo v0.0.1: + foo[..] +", + ) + .run(); +} + +#[cargo_test] +fn list_error() { + pkg("foo", "0.0.1"); + cargo_process("install foo").run(); + cargo_process("install --list") + .with_stdout( + "\ +foo v0.0.1: + foo[..] +", + ) + .run(); + let mut worldfile_path = cargo_home(); + worldfile_path.push(".crates.toml"); + let mut worldfile = OpenOptions::new() + .write(true) + .open(worldfile_path) + .expect(".crates.toml should be there"); + worldfile.write_all(b"\x00").unwrap(); + drop(worldfile); + cargo_process("install --list --verbose") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse crate metadata at `[..]` + +Caused by: + invalid TOML found for metadata + +Caused by: + unexpected character[..] 
+", + ) + .run(); +} + +#[cargo_test] +fn uninstall_pkg_does_not_exist() { + cargo_process("uninstall foo") + .with_status(101) + .with_stderr("[ERROR] package ID specification `foo` matched no packages") + .run(); +} + +#[cargo_test] +fn uninstall_bin_does_not_exist() { + pkg("foo", "0.0.1"); + + cargo_process("install foo").run(); + cargo_process("uninstall foo --bin=bar") + .with_status(101) + .with_stderr("[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1`") + .run(); +} + +#[cargo_test] +fn uninstall_piecemeal() { + let p = project() + .file("src/bin/foo.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .build(); + + cargo_process("install --path").arg(p.root()).run(); + assert_has_installed_exe(cargo_home(), "foo"); + assert_has_installed_exe(cargo_home(), "bar"); + + cargo_process("uninstall foo --bin=bar") + .with_stderr("[REMOVING] [..]bar[..]") + .run(); + + assert_has_installed_exe(cargo_home(), "foo"); + assert_has_not_installed_exe(cargo_home(), "bar"); + + cargo_process("uninstall foo --bin=foo") + .with_stderr("[REMOVING] [..]foo[..]") + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); + + cargo_process("uninstall foo") + .with_status(101) + .with_stderr("[ERROR] package ID specification `foo` matched no packages") + .run(); +} + +#[cargo_test] +fn subcommand_works_out_of_the_box() { + Package::new("cargo-foo", "1.0.0") + .file("src/main.rs", r#"fn main() { println!("bar"); }"#) + .publish(); + cargo_process("install cargo-foo").run(); + cargo_process("foo").with_stdout("bar\n").run(); + cargo_process("--list") + .with_stdout_contains(" foo\n") + .run(); +} + +#[cargo_test] +fn installs_from_cwd_by_default() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("install") + .with_stderr_contains( + "warning: Using `cargo install` to install the binaries for the \ + package in current working directory is deprecated, \ + use `cargo install --path .` instead. \ + Use `cargo build` if you want to simply build the package.", + ) + .run(); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn installs_from_cwd_with_2018_warnings() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + edition = "2018" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("install") + .with_status(101) + .with_stderr_contains( + "error: Using `cargo install` to install the binaries for the \ + package in current working directory is no longer supported, \ + use `cargo install --path .` instead. \ + Use `cargo build` if you want to simply build the package.", + ) + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn uninstall_cwd() { + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("install --path .") + .with_stderr(&format!( + "\ +[INSTALLING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] 
+[INSTALLING] {home}/bin/foo[EXE] +[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`) +[WARNING] be sure to add `{home}/bin` to your PATH to be able to run the installed binaries", + home = cargo_home().display(), + )) + .run(); + assert_has_installed_exe(cargo_home(), "foo"); + + p.cargo("uninstall") + .with_stdout("") + .with_stderr(&format!( + "[REMOVING] {home}/bin/foo[EXE]", + home = cargo_home().display() + )) + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn uninstall_cwd_not_installed() { + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("uninstall") + .with_status(101) + .with_stdout("") + .with_stderr("error: package `foo v0.0.1 ([CWD])` is not installed") + .run(); +} + +#[cargo_test] +fn uninstall_cwd_no_project() { + let err_msg = if cfg!(windows) { + "The system cannot find the file specified." + } else { + "No such file or directory" + }; + cargo_process("uninstall") + .with_status(101) + .with_stdout("") + .with_stderr(format!( + "\ +[ERROR] failed to read `[CWD]/Cargo.toml` + +Caused by: + {err_msg} (os error 2)", + err_msg = err_msg, + )) + .run(); +} + +#[cargo_test] +fn do_not_rebuilds_on_local_install() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + p.cargo("build --release").run(); + cargo_process("install --path") + .arg(p.root()) + .with_stderr( + "\ +[INSTALLING] [..] +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..] +[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); + + assert!(p.build_dir().exists()); + assert!(p.release_bin("foo").exists()); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn reports_unsuccessful_subcommand_result() { + Package::new("cargo-fail", "1.0.0") + .file("src/main.rs", "fn main() { panic!(); }") + .publish(); + cargo_process("install cargo-fail").run(); + cargo_process("--list") + .with_stdout_contains(" fail\n") + .run(); + cargo_process("fail") + .with_status(101) + .with_stderr_contains("thread '[..]' panicked at 'explicit panic', [..]") + .run(); +} + +#[cargo_test] +fn git_with_lockfile() { + let p = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "fn main() {}") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.1.0" + dependencies = [ "bar 0.1.0" ] + + [[package]] + name = "bar" + version = "0.1.0" + "#, + ) + .build(); + + cargo_process("install --git") + .arg(p.url().to_string()) + .run(); +} + +#[cargo_test] +fn q_silences_warnings() { + let p = project().file("src/main.rs", "fn main() {}").build(); + + cargo_process("install -q --path") + .arg(p.root()) + .with_stderr("") + .run(); +} + +#[cargo_test] +fn readonly_dir() { + pkg("foo", "0.0.1"); + + let root = paths::root(); + let dir = &root.join("readonly"); + fs::create_dir(root.join("readonly")).unwrap(); + let mut perms = fs::metadata(dir).unwrap().permissions(); + perms.set_readonly(true); + fs::set_permissions(dir, perms).unwrap(); + + cargo_process("install foo").cwd(dir).run(); + assert_has_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn use_path_workspace() { + Package::new("foo", "1.0.0").publish(); + let p = 
project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["baz"]
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "baz/Cargo.toml",
+            r#"
+            [package]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "1"
+        "#,
+        )
+        .file("baz/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    let lock = p.read_lockfile();
+    p.cargo("install").run();
+    let lock2 = p.read_lockfile();
+    assert_eq!(lock, lock2, "different lockfiles");
+}
+
+#[cargo_test]
+fn dev_dependencies_no_check() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dev-dependencies]
+            baz = "1.0.0"
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr_contains("[..] no matching package named `baz` found")
+        .run();
+    p.cargo("install").run();
+}
+
+#[cargo_test]
+fn dev_dependencies_lock_file_untouched() {
+    Package::new("foo", "1.0.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dev-dependencies]
+            bar = { path = "a" }
+        "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    let lock = p.read_lockfile();
+    p.cargo("install").run();
+    let lock2 = p.read_lockfile();
+    assert!(lock == lock2, "different lockfiles");
+}
+
+#[cargo_test]
+fn install_target_native() {
+    pkg("foo", "0.1.0");
+
+    cargo_process("install foo --target")
+        .arg(support::rustc_host())
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn install_target_foreign() {
+    if cross_compile::disabled() {
+        return;
+    }
+
+    pkg("foo", "0.1.0");
+
+    cargo_process("install foo --target")
+        .arg(cross_compile::alternate())
+        .run();
+    assert_has_installed_exe(cargo_home(), "foo");
+}
+
+#[cargo_test]
+fn vers_precise() {
+    pkg("foo", "0.1.1");
+    pkg("foo", "0.1.2");
+
+    cargo_process("install foo --vers 0.1.1")
+        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+        .run();
+}
+
+#[cargo_test]
+fn version_too() {
+    pkg("foo", "0.1.1");
+    pkg("foo", "0.1.2");
+
+    cargo_process("install foo --version 0.1.1")
+        .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])")
+        .run();
+}
+
+#[cargo_test]
+fn not_both_vers_and_version() {
+    pkg("foo", "0.1.1");
+    pkg("foo", "0.1.2");
+
+    cargo_process("install foo --version 0.1.1 --vers 0.1.2")
+        .with_status(1)
+        .with_stderr_contains(
+            "\
+error: The argument '--version <VERSION>' was provided more than once, \
+but cannot be used multiple times
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn legacy_version_requirement() {
+    pkg("foo", "0.1.1");
+
+    cargo_process("install foo --vers 0.1")
+        .with_stderr_contains(
+            "\
+warning: the `--vers` provided, `0.1`, is not a valid semver version
+
+historically Cargo treated this as a semver version requirement accidentally
+and will continue to do so, but this behavior will be removed eventually
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn test_install_git_cannot_be_a_base_url() {
+    cargo_process("install --git github.com:rust-lang-nursery/rustfmt.git").with_status(101).with_stderr("error: invalid url `github.com:rust-lang-nursery/rustfmt.git`: cannot-be-a-base-URLs are not supported").run();
+}
+
+#[cargo_test]
+fn uninstall_multiple_and_specifying_bin() {
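+    // `--bin` can only refer to a binary of one installed package at a time,
+    // so pairing it with multiple package specs is rejected.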
cargo_process("uninstall foo bar --bin baz").with_status(101).with_stderr("error: A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.").run(); +} + +#[cargo_test] +fn uninstall_multiple_and_some_pkg_does_not_exist() { + pkg("foo", "0.0.1"); + + cargo_process("install foo").run(); + + cargo_process("uninstall foo bar") + .with_status(101) + .with_stderr( + "\ +[REMOVING] [CWD]/home/.cargo/bin/foo[EXE] +error: package ID specification `bar` matched no packages +[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above). +error: some packages failed to uninstall +", + ) + .run(); + + assert_has_not_installed_exe(cargo_home(), "foo"); + assert_has_not_installed_exe(cargo_home(), "bar"); +} + +#[cargo_test] +fn custom_target_dir_for_git_source() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "fn main() {}") + .build(); + + cargo_process("install --git") + .arg(p.url().to_string()) + .run(); + assert!(!paths::root().join("target/release").is_dir()); + + cargo_process("install --force --git") + .arg(p.url().to_string()) + .env("CARGO_TARGET_DIR", "target") + .run(); + assert!(paths::root().join("target/release").is_dir()); +} + +#[cargo_test] +fn install_respects_lock_file() { + // `cargo install` now requires --locked to use a Cargo.lock. + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.1.1") + .file("src/lib.rs", "not rust") + .publish(); + Package::new("foo", "0.1.0") + .dep("bar", "0.1") + .file("src/lib.rs", "") + .file( + "src/main.rs", + "extern crate foo; extern crate bar; fn main() {}", + ) + .file( + "Cargo.lock", + r#" +[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "foo" +version = "0.1.0" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] +"#, + ) + .publish(); + + cargo_process("install foo") + .with_stderr_contains("[..]not rust[..]") + .with_status(101) + .run(); + cargo_process("install --locked foo").run(); +} + +#[cargo_test] +fn install_path_respects_lock_file() { + // --path version of install_path_respects_lock_file, --locked is required + // to use Cargo.lock. + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.1.1") + .file("src/lib.rs", "not rust") + .publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = "0.1" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() {}") + .file( + "Cargo.lock", + r#" +[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "foo" +version = "0.1.0" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] +"#, + ) + .build(); + + p.cargo("install --path .") + .with_stderr_contains("[..]not rust[..]") + .with_status(101) + .run(); + p.cargo("install --path . 
--locked").run(); +} + +#[cargo_test] +fn lock_file_path_deps_ok() { + Package::new("bar", "0.1.0").publish(); + + Package::new("foo", "0.1.0") + .dep("bar", "0.1") + .file("src/lib.rs", "") + .file( + "src/main.rs", + "extern crate foo; extern crate bar; fn main() {}", + ) + .file( + "Cargo.lock", + r#" +[[package]] +name = "bar" +version = "0.1.0" + +[[package]] +name = "foo" +version = "0.1.0" +dependencies = [ + "bar 0.1.0", +] +"#, + ) + .publish(); + + cargo_process("install foo").run(); +} + +#[cargo_test] +fn install_empty_argument() { + // Bug 5229 + cargo_process("install") + .arg("") + .with_status(1) + .with_stderr_contains( + "[ERROR] The argument '...' requires a value but none was supplied", + ) + .run(); +} + +#[cargo_test] +fn git_repo_replace() { + let p = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/main.rs", "fn main() {}") + .build(); + let repo = git2::Repository::open(&p.root()).unwrap(); + let old_rev = repo.revparse_single("HEAD").unwrap().id(); + cargo_process("install --git") + .arg(p.url().to_string()) + .run(); + git::commit(&repo); + let new_rev = repo.revparse_single("HEAD").unwrap().id(); + let mut path = paths::home(); + path.push(".cargo/.crates.toml"); + + assert_ne!(old_rev, new_rev); + assert!(fs::read_to_string(path.clone()) + .unwrap() + .contains(&format!("{}", old_rev))); + cargo_process("install --force --git") + .arg(p.url().to_string()) + .run(); + assert!(fs::read_to_string(path) + .unwrap() + .contains(&format!("{}", new_rev))); +} + +#[cargo_test] +fn workspace_uses_workspace_target_dir() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + + [dependencies] + bar = { path = 'bar' } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --release").cwd("bar").run(); + cargo_process("install --path") + .arg(p.root().join("bar")) + .with_stderr( + "[INSTALLING] [..] +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..] +[INSTALLED] package `bar v0.1.0 ([..]/bar)` (executable `bar[EXE]`) +[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries +", + ) + .run(); +} + +#[cargo_test] +fn install_ignores_local_cargo_config() { + pkg("bar", "0.0.1"); + + let p = project() + .file( + ".cargo/config", + r#" + [build] + target = "non-existing-target" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("install bar").run(); + assert_has_installed_exe(cargo_home(), "bar"); +} + +#[cargo_test] +fn install_global_cargo_config() { + pkg("bar", "0.0.1"); + + let config = cargo_home().join("config"); + let mut toml = fs::read_to_string(&config).unwrap_or_default(); + + toml.push_str( + r#" + [build] + target = 'nonexistent' + "#, + ); + fs::write(&config, toml).unwrap(); + + cargo_process("install bar") + .with_status(101) + .with_stderr_contains("[..]--target nonexistent[..]") + .run(); +} + +#[cargo_test] +fn install_path_config() { + project() + .file( + ".cargo/config", + r#" + [build] + target = 'nonexistent' + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + cargo_process("install --path foo") + .with_status(101) + .with_stderr_contains("[..]--target nonexistent[..]") + .run(); +} + +#[cargo_test] +fn install_version_req() { + // Try using a few versionreq styles. 
+ pkg("foo", "0.0.3"); + pkg("foo", "1.0.4"); + pkg("foo", "1.0.5"); + cargo_process("install foo --version=*") + .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]") + .with_stderr_contains("[INSTALLING] foo v1.0.5") + .run(); + cargo_process("uninstall foo").run(); + cargo_process("install foo --version=^1.0") + .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]") + .with_stderr_contains("[INSTALLING] foo v1.0.5") + .run(); + cargo_process("uninstall foo").run(); + cargo_process("install foo --version=0.0.*") + .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]") + .with_stderr_contains("[INSTALLING] foo v0.0.3") + .run(); +} diff --git a/tests/testsuite/install_upgrade.rs b/tests/testsuite/install_upgrade.rs new file mode 100644 index 00000000000..ff4157f1a71 --- /dev/null +++ b/tests/testsuite/install_upgrade.rs @@ -0,0 +1,786 @@ +use cargo::core::PackageId; +use std::collections::BTreeSet; +use std::env; +use std::fs; +use std::path::PathBuf; +use std::sync::atomic::{AtomicUsize, Ordering}; + +use crate::support::install::{cargo_home, exe}; +use crate::support::paths::CargoPathExt; +use crate::support::registry::Package; +use crate::support::{ + basic_manifest, cargo_process, cross_compile, execs, git, process, project, Execs, +}; + +// Helper for publishing a package. +fn pkg(name: &str, vers: &str) { + Package::new(name, vers) + .file( + "src/main.rs", + r#"fn main() { println!("{}", env!("CARGO_PKG_VERSION")) }"#, + ) + .publish(); +} + +fn v1_path() -> PathBuf { + cargo_home().join(".crates.toml") +} + +fn v2_path() -> PathBuf { + cargo_home().join(".crates2.json") +} + +fn load_crates1() -> toml::Value { + toml::from_str(&fs::read_to_string(v1_path()).unwrap()).unwrap() +} + +fn load_crates2() -> serde_json::Value { + serde_json::from_str(&fs::read_to_string(v2_path()).unwrap()).unwrap() +} + +fn installed_exe(name: &str) -> PathBuf { + cargo_home().join("bin").join(exe(name)) +} + +/// Helper for executing binaries installed by cargo. +fn installed_process(name: &str) -> Execs { + static NEXT_ID: AtomicUsize = AtomicUsize::new(0); + thread_local!(static UNIQUE_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst)); + + // This copies the executable to a unique name so that it may be safely + // replaced on Windows. See Project::rename_run for details. + let src = installed_exe(name); + let dst = installed_exe(&UNIQUE_ID.with(|my_id| format!("{}-{}", name, my_id))); + // Note: Cannot use copy. On Linux, file descriptors may be left open to + // the executable as other tests in other threads are constantly spawning + // new processes (see https://github.com/rust-lang/cargo/pull/5557 for + // more). + fs::rename(&src, &dst) + .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e)); + // Leave behind a fake file so that reinstall duplicate check works. + fs::write(src, "").unwrap(); + let p = process(dst); + execs().with_process_builder(p) +} + +/// Check that the given package name/version has the following bins listed in +/// the trackers. Also verifies that both trackers are in sync and valid. +fn validate_trackers(name: &str, version: &str, bins: &[&str]) { + let v1 = load_crates1(); + let v1_table = v1.get("v1").unwrap().as_table().unwrap(); + let v2 = load_crates2(); + let v2_table = v2["installs"].as_object().unwrap(); + assert_eq!(v1_table.len(), v2_table.len()); + // Convert `bins` to a BTreeSet. 
+    let bins: BTreeSet<String> = bins
+        .iter()
+        .map(|b| format!("{}{}", b, env::consts::EXE_SUFFIX))
+        .collect();
+    // Check every entry matches between v1 and v2.
+    for (pkg_id_str, v1_bins) in v1_table {
+        let pkg_id: PackageId = toml::Value::from(pkg_id_str.to_string())
+            .try_into()
+            .unwrap();
+        let v1_bins: BTreeSet<String> = v1_bins
+            .as_array()
+            .unwrap()
+            .iter()
+            .map(|b| b.as_str().unwrap().to_string())
+            .collect();
+        if pkg_id.name().as_str() == name && pkg_id.version().to_string() == version {
+            assert_eq!(bins, v1_bins);
+        }
+        let pkg_id_value = serde_json::to_value(&pkg_id).unwrap();
+        let pkg_id_str = pkg_id_value.as_str().unwrap();
+        let v2_info = v2_table
+            .get(pkg_id_str)
+            .expect("v2 missing v1 pkg")
+            .as_object()
+            .unwrap();
+        let v2_bins = v2_info["bins"].as_array().unwrap();
+        let v2_bins: BTreeSet<String> = v2_bins
+            .iter()
+            .map(|b| b.as_str().unwrap().to_string())
+            .collect();
+        assert_eq!(v1_bins, v2_bins);
+    }
+}
+
+#[cargo_test]
+fn registry_upgrade() {
+    // Installing and upgrading from a registry.
+    pkg("foo", "1.0.0");
+    cargo_process("install foo -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v1.0.0 (registry [..])
+[INSTALLING] foo v1.0.0
+[COMPILING] foo v1.0.0
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
+[INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+        )
+        .run();
+    installed_process("foo").with_stdout("1.0.0").run();
+    validate_trackers("foo", "1.0.0", &["foo"]);
+
+    cargo_process("install foo -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[IGNORED] package `foo v1.0.0` is already installed[..]
+[WARNING] be sure to add [..]
+",
+        )
+        .run();
+
+    pkg("foo", "1.0.1");
+
+    cargo_process("install foo -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] foo v1.0.1 (registry [..])
+[INSTALLING] foo v1.0.1
+[COMPILING] foo v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
+[REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`)
+[WARNING] be sure to add [..]
+",
+        )
+        .run();
+
+    installed_process("foo").with_stdout("1.0.1").run();
+    validate_trackers("foo", "1.0.1", &["foo"]);
+
+    cargo_process("install foo --version=1.0.0 -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[COMPILING] foo v1.0.0")
+        .run();
+    installed_process("foo").with_stdout("1.0.0").run();
+    validate_trackers("foo", "1.0.0", &["foo"]);
+
+    cargo_process("install foo --version=^1.0 -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[COMPILING] foo v1.0.1")
+        .run();
+    installed_process("foo").with_stdout("1.0.1").run();
+    validate_trackers("foo", "1.0.1", &["foo"]);
+
+    cargo_process("install foo --version=^1.0 -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .with_stderr_contains("[IGNORED] package `foo v1.0.1` is already installed[..]")
+        .run();
+}
+
+#[cargo_test]
+fn uninstall() {
+    // Basic uninstall test.
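+    // Removing the only installed package should leave both the v1
+    // (`.crates.toml`) and v2 (`.crates2.json`) trackers with zero entries.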
+ pkg("foo", "1.0.0"); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + cargo_process("uninstall foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + let data = load_crates2(); + assert_eq!(data["installs"].as_object().unwrap().len(), 0); + let v1_table = load_crates1(); + assert_eq!(v1_table.get("v1").unwrap().as_table().unwrap().len(), 0); +} + +#[cargo_test] +fn upgrade_force() { + pkg("foo", "1.0.0"); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + cargo_process("install foo -Z install-upgrade --force") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[UPDATING] `[..]` index +[INSTALLING] foo v1.0.0 +[COMPILING] foo v1.0.0 +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] [..]/.cargo/bin/foo[EXE] +[REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executable `foo[EXE]`) +[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] +", + ) + .run(); + validate_trackers("foo", "1.0.0", &["foo"]); +} + +#[cargo_test] +fn ambiguous_version_no_longer_allowed() { + // Non-semver-requirement is not allowed for `--version`. + pkg("foo", "1.0.0"); + cargo_process("install foo --version=1.0 -Z install-upgrade") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[UPDATING] `[..]` index +[ERROR] the `--vers` provided, `1.0`, is not a valid semver version: cannot parse '1.0' as a semver + +if you want to specify semver range, add an explicit qualifier, like ^1.0 +", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn path_is_always_dirty() { + // --path should always reinstall. + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("install --path . -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + p.cargo("install --path . -Z install-upgrade") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[REPLACING] [..]/foo[EXE]") + .run(); +} + +#[cargo_test] +fn fails_for_conflicts_unknown() { + // If an untracked file is in the way, it should fail. + pkg("foo", "1.0.0"); + let exe = installed_exe("foo"); + exe.parent().unwrap().mkdir_p(); + fs::write(exe, "").unwrap(); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[ERROR] binary `foo[EXE]` already exists in destination") + .with_status(101) + .run(); +} + +#[cargo_test] +fn fails_for_conflicts_known() { + // If the same binary exists in another package, it should fail. 
+ pkg("foo", "1.0.0"); + Package::new("bar", "1.0.0") + .file("src/bin/foo.rs", "fn main() {}") + .publish(); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + cargo_process("install bar -Z install-upgrade") + .masquerade_as_nightly_cargo() + .with_stderr_contains( + "[ERROR] binary `foo[EXE]` already exists in destination as part of `foo v1.0.0`", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn supports_multiple_binary_names() { + // Can individually install with --bin or --example + Package::new("foo", "1.0.0") + .file("src/main.rs", r#"fn main() { println!("foo"); }"#) + .file("src/bin/a.rs", r#"fn main() { println!("a"); }"#) + .file("examples/ex1.rs", r#"fn main() { println!("ex1"); }"#) + .publish(); + cargo_process("install foo -Z install-upgrade --bin foo") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("foo").run(); + assert!(!installed_exe("a").exists()); + assert!(!installed_exe("ex1").exists()); + validate_trackers("foo", "1.0.0", &["foo"]); + cargo_process("install foo -Z install-upgrade --bin a") + .masquerade_as_nightly_cargo() + .run(); + installed_process("a").with_stdout("a").run(); + assert!(!installed_exe("ex1").exists()); + validate_trackers("foo", "1.0.0", &["a", "foo"]); + cargo_process("install foo -Z install-upgrade --example ex1") + .masquerade_as_nightly_cargo() + .run(); + installed_process("ex1").with_stdout("ex1").run(); + validate_trackers("foo", "1.0.0", &["a", "ex1", "foo"]); + cargo_process("uninstall foo -Z install-upgrade --bin foo") + .masquerade_as_nightly_cargo() + .run(); + assert!(!installed_exe("foo").exists()); + assert!(installed_exe("ex1").exists()); + validate_trackers("foo", "1.0.0", &["a", "ex1"]); + cargo_process("uninstall foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + assert!(!installed_exe("ex1").exists()); + assert!(!installed_exe("a").exists()); +} + +#[cargo_test] +fn v1_already_installed_fresh() { + // Install with v1, then try to install again with v2. + pkg("foo", "1.0.0"); + cargo_process("install foo").run(); + cargo_process("install foo -Z install-upgrade") + .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]") + .masquerade_as_nightly_cargo() + .run(); +} + +#[cargo_test] +fn v1_already_installed_dirty() { + // Install with v1, then install a new version with v2. 
+ pkg("foo", "1.0.0"); + cargo_process("install foo").run(); + pkg("foo", "1.0.1"); + cargo_process("install foo -Z install-upgrade") + .with_stderr_contains("[COMPILING] foo v1.0.1") + .with_stderr_contains("[REPLACING] [..]/foo[EXE]") + .masquerade_as_nightly_cargo() + .run(); + validate_trackers("foo", "1.0.1", &["foo"]); +} + +#[cargo_test] +fn change_features_rebuilds() { + Package::new("foo", "1.0.0") + .file( + "src/main.rs", + r#"fn main() { + if cfg!(feature = "f1") { + println!("f1"); + } + if cfg!(feature = "f2") { + println!("f2"); + } + }"#, + ) + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "1.0.0" + + [features] + f1 = [] + f2 = [] + default = ["f1"] + "#, + ) + .publish(); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("f1").run(); + cargo_process("install foo -Z install-upgrade --no-default-features") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("").run(); + cargo_process("install foo -Z install-upgrade --all-features") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("f1\nf2").run(); + cargo_process("install foo -Z install-upgrade --no-default-features --features=f1") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("f1").run(); +} + +#[cargo_test] +fn change_profile_rebuilds() { + pkg("foo", "1.0.0"); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + cargo_process("install foo -Z install-upgrade --debug") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[COMPILING] foo v1.0.0") + .with_stderr_contains("[REPLACING] [..]foo[EXE]") + .run(); + cargo_process("install foo -Z install-upgrade --debug") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]") + .run(); +} + +#[cargo_test] +fn change_target_rebuilds() { + if cross_compile::disabled() { + return; + } + pkg("foo", "1.0.0"); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + let target = cross_compile::alternate(); + cargo_process("install foo -v -Z install-upgrade --target") + .arg(&target) + .masquerade_as_nightly_cargo() + .with_stderr_contains("[COMPILING] foo v1.0.0") + .with_stderr_contains("[REPLACING] [..]foo[EXE]") + .with_stderr_contains(&format!("[..]--target {}[..]", target)) + .run(); +} + +#[cargo_test] +fn change_bin_sets_rebuilds() { + // Changing which bins in a multi-bin project should reinstall. 
+ Package::new("foo", "1.0.0") + .file("src/main.rs", "fn main() { }") + .file("src/bin/x.rs", "fn main() { }") + .file("src/bin/y.rs", "fn main() { }") + .publish(); + cargo_process("install foo -Z install-upgrade --bin x") + .masquerade_as_nightly_cargo() + .run(); + assert!(installed_exe("x").exists()); + assert!(!installed_exe("y").exists()); + assert!(!installed_exe("foo").exists()); + validate_trackers("foo", "1.0.0", &["x"]); + cargo_process("install foo -Z install-upgrade --bin y") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `y[EXE]`)") + .run(); + assert!(installed_exe("x").exists()); + assert!(installed_exe("y").exists()); + assert!(!installed_exe("foo").exists()); + validate_trackers("foo", "1.0.0", &["x", "y"]); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`)") + .with_stderr_contains( + "[REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executables `x[EXE]`, `y[EXE]`)", + ) + .run(); + assert!(installed_exe("x").exists()); + assert!(installed_exe("y").exists()); + assert!(installed_exe("foo").exists()); + validate_trackers("foo", "1.0.0", &["foo", "x", "y"]); +} + +#[cargo_test] +fn forwards_compatible() { + // Unknown fields should be preserved. + pkg("foo", "1.0.0"); + pkg("bar", "1.0.0"); + cargo_process("install foo -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + let key = "foo 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)"; + let v2 = cargo_home().join(".crates2.json"); + let mut data = load_crates2(); + data["newfield"] = serde_json::Value::Bool(true); + data["installs"][key]["moreinfo"] = serde_json::Value::String("shazam".to_string()); + fs::write(&v2, serde_json::to_string(&data).unwrap()).unwrap(); + cargo_process("install bar -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + let data: serde_json::Value = serde_json::from_str(&fs::read_to_string(&v2).unwrap()).unwrap(); + assert_eq!(data["newfield"].as_bool().unwrap(), true); + assert_eq!( + data["installs"][key]["moreinfo"].as_str().unwrap(), + "shazam" + ); +} + +#[cargo_test] +fn v2_syncs() { + // V2 inherits the installs from V1. + pkg("one", "1.0.0"); + pkg("two", "1.0.0"); + pkg("three", "1.0.0"); + let p = project() + .file("src/bin/x.rs", "fn main() {}") + .file("src/bin/y.rs", "fn main() {}") + .build(); + cargo_process("install one -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + validate_trackers("one", "1.0.0", &["one"]); + p.cargo("install -Z install-upgrade --path .") + .masquerade_as_nightly_cargo() + .run(); + validate_trackers("foo", "1.0.0", &["x", "y"]); + // v1 add/remove + cargo_process("install two").run(); + cargo_process("uninstall one").run(); + // This should pick up that `two` was added, `one` was removed. 
+ cargo_process("install three -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + validate_trackers("three", "1.0.0", &["three"]); + cargo_process("install --list") + .with_stdout( + "\ +foo v0.0.1 ([..]/foo): + x[EXE] + y[EXE] +three v1.0.0: + three[EXE] +two v1.0.0: + two[EXE] +", + ) + .run(); + cargo_process("install one -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + installed_process("one").with_stdout("1.0.0").run(); + validate_trackers("one", "1.0.0", &["one"]); + cargo_process("install two -Z install-upgrade") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[IGNORED] package `two v1.0.0` is already installed[..]") + .run(); + // v1 remove + p.cargo("uninstall --bin x").run(); + pkg("x", "1.0.0"); + pkg("y", "1.0.0"); + // This should succeed because `x` was removed in V1. + cargo_process("install x -Z install-upgrade") + .masquerade_as_nightly_cargo() + .run(); + validate_trackers("x", "1.0.0", &["x"]); + // This should fail because `y` still exists in a different package. + cargo_process("install y -Z install-upgrade") + .masquerade_as_nightly_cargo() + .with_stderr_contains( + "[ERROR] binary `y[EXE]` already exists in destination \ + as part of `foo v0.0.1 ([..])`", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn upgrade_git() { + let git_project = + git::new("foo", |project| project.file("src/main.rs", "fn main() {}")).unwrap(); + // install + cargo_process("install -Z install-upgrade --git") + .arg(git_project.url().to_string()) + .masquerade_as_nightly_cargo() + .run(); + // Check install stays fresh. + cargo_process("install -Z install-upgrade --git") + .arg(git_project.url().to_string()) + .masquerade_as_nightly_cargo() + .with_stderr_contains( + "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \ + already installed,[..]", + ) + .run(); + // Modify a file. + let repo = git2::Repository::open(git_project.root()).unwrap(); + git_project.change_file("src/main.rs", r#"fn main() {println!("onomatopoeia");}"#); + git::add(&repo); + git::commit(&repo); + // Install should reinstall. + cargo_process("install -Z install-upgrade --git") + .arg(git_project.url().to_string()) + .masquerade_as_nightly_cargo() + .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") + .with_stderr_contains("[REPLACING] [..]/foo[EXE]") + .run(); + installed_process("foo").with_stdout("onomatopoeia").run(); + // Check install stays fresh. + cargo_process("install -Z install-upgrade --git") + .arg(git_project.url().to_string()) + .masquerade_as_nightly_cargo() + .with_stderr_contains( + "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \ + already installed,[..]", + ) + .run(); +} + +#[cargo_test] +fn switch_sources() { + // Installing what appears to be the same thing, but from different + // sources should reinstall. 
+ pkg("foo", "1.0.0"); + Package::new("foo", "1.0.0") + .file("src/main.rs", r#"fn main() { println!("alt"); }"#) + .alternative(true) + .publish(); + let p = project() + .at("foo-local") // so it doesn't use the same directory as the git project + .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) + .file("src/main.rs", r#"fn main() { println!("local"); }"#) + .build(); + let git_project = git::new("foo", |project| { + project.file("src/main.rs", r#"fn main() { println!("git"); }"#) + }) + .unwrap(); + + cargo_process("install -Z install-upgrade foo") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("1.0.0").run(); + cargo_process("install -Z install-upgrade foo --registry alternative") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("alt").run(); + p.cargo("install -Z install-upgrade --path .") + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("local").run(); + cargo_process("install -Z install-upgrade --git") + .arg(git_project.url().to_string()) + .masquerade_as_nightly_cargo() + .run(); + installed_process("foo").with_stdout("git").run(); +} + +#[cargo_test] +fn multiple_report() { + // Testing the full output that indicates installed/ignored/replaced/summary. + pkg("one", "1.0.0"); + pkg("two", "1.0.0"); + fn three(vers: &str) { + Package::new("three", vers) + .file("src/main.rs", "fn main() { }") + .file("src/bin/x.rs", "fn main() { }") + .file("src/bin/y.rs", "fn main() { }") + .publish(); + } + three("1.0.0"); + cargo_process("install -Z install-upgrade one two three") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] one v1.0.0 (registry `[..]`) +[INSTALLING] one v1.0.0 +[COMPILING] one v1.0.0 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..]/.cargo/bin/one[EXE] +[INSTALLED] package `one v1.0.0` (executable `one[EXE]`) +[DOWNLOADING] crates ... +[DOWNLOADED] two v1.0.0 (registry `[..]`) +[INSTALLING] two v1.0.0 +[COMPILING] two v1.0.0 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..]/.cargo/bin/two[EXE] +[INSTALLED] package `two v1.0.0` (executable `two[EXE]`) +[DOWNLOADING] crates ... +[DOWNLOADED] three v1.0.0 (registry `[..]`) +[INSTALLING] three v1.0.0 +[COMPILING] three v1.0.0 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..]/.cargo/bin/three[EXE] +[INSTALLING] [..]/.cargo/bin/x[EXE] +[INSTALLING] [..]/.cargo/bin/y[EXE] +[INSTALLED] package `three v1.0.0` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`) +[SUMMARY] Successfully installed one, two, three! +[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] +", + ) + .run(); + pkg("foo", "1.0.1"); + pkg("bar", "1.0.1"); + three("1.0.1"); + cargo_process("install -Z install-upgrade one two three") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[UPDATING] `[..]` index +[IGNORED] package `one v1.0.0` is already installed, use --force to override +[IGNORED] package `two v1.0.0` is already installed, use --force to override +[DOWNLOADING] crates ... +[DOWNLOADED] three v1.0.1 (registry `[..]`) +[INSTALLING] three v1.0.1 +[COMPILING] three v1.0.1 +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] [..]/.cargo/bin/three[EXE] +[REPLACING] [..]/.cargo/bin/x[EXE] +[REPLACING] [..]/.cargo/bin/y[EXE] +[REPLACED] package `three v1.0.0` with `three v1.0.1` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`) +[SUMMARY] Successfully installed one, two, three! 
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+        )
+        .run();
+    cargo_process("uninstall -Z install-upgrade three")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[REMOVING] [..]/.cargo/bin/three[EXE]
+[REMOVING] [..]/.cargo/bin/x[EXE]
+[REMOVING] [..]/.cargo/bin/y[EXE]
+",
+        )
+        .run();
+    cargo_process("install -Z install-upgrade three --bin x")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[INSTALLING] three v1.0.1
+[COMPILING] three v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/x[EXE]
+[INSTALLED] package `three v1.0.1` (executable `x[EXE]`)
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+        )
+        .run();
+    cargo_process("install -Z install-upgrade three")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[INSTALLING] three v1.0.1
+[COMPILING] three v1.0.1
+[FINISHED] release [optimized] target(s) in [..]
+[INSTALLING] [..]/.cargo/bin/three[EXE]
+[INSTALLING] [..]/.cargo/bin/y[EXE]
+[REPLACING] [..]/.cargo/bin/x[EXE]
+[INSTALLED] package `three v1.0.1` (executables `three[EXE]`, `y[EXE]`)
+[REPLACED] package `three v1.0.1` with `three v1.0.1` (executable `x[EXE]`)
+[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..]
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn no_track_gated() {
+    cargo_process("install --no-track foo")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "[ERROR] `--no-track` flag is unstable, pass `-Z install-upgrade` to enable it",
+        )
+        .with_status(101)
+        .run();
+}
+
+#[cargo_test]
+fn no_track() {
+    pkg("foo", "1.0.0");
+    cargo_process("install --no-track foo -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .run();
+    assert!(!v1_path().exists());
+    assert!(!v2_path().exists());
+    cargo_process("install --no-track foo -Z install-upgrade")
+        .masquerade_as_nightly_cargo()
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[ERROR] binary `foo[EXE]` already exists in destination
+Add --force to overwrite
+",
+        )
+        .with_status(101)
+        .run();
+}
diff --git a/tests/testsuite/jobserver.rs b/tests/testsuite/jobserver.rs
new file mode 100644
index 00000000000..59300b6762c
--- /dev/null
+++ b/tests/testsuite/jobserver.rs
@@ -0,0 +1,201 @@
+use std::net::TcpListener;
+use std::process::Command;
+use std::thread;
+
+use crate::support::{cargo_exe, project};
+
+#[cargo_test]
+fn jobserver_exists() {
+    let p = project()
+        .file(
+            "build.rs",
+            r#"
+            use std::env;
+
+            fn main() {
+                let var = env::var("CARGO_MAKEFLAGS").unwrap();
+                let arg = var.split(' ')
+                    .find(|p| p.starts_with("--jobserver"))
+                    .unwrap();
+                let val = &arg[arg.find('=').unwrap() + 1..];
+                validate(val);
+            }
+
+            #[cfg(unix)]
+            fn validate(s: &str) {
+                use std::fs::File;
+                use std::io::*;
+                use std::os::unix::prelude::*;
+
+                let fds = s.split(',').collect::<Vec<_>>();
+                println!("{}", s);
+                assert_eq!(fds.len(), 2);
+                unsafe {
+                    let mut read = File::from_raw_fd(fds[0].parse().unwrap());
+                    let mut write = File::from_raw_fd(fds[1].parse().unwrap());
+
+                    let mut buf = [0];
+                    assert_eq!(read.read(&mut buf).unwrap(), 1);
+                    assert_eq!(write.write(&buf).unwrap(), 1);
+                }
+            }
+
+            #[cfg(windows)]
+            fn validate(_: &str) {
+                // a little too complicated for a test...
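+                // (On Windows the jobserver handle is a named semaphore
+                // rather than a pair of pipe file descriptors, so there
+                // are no fds to poke at here.)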
+ } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn makes_jobserver_used() { + let make = if cfg!(windows) { + "mingw32-make" + } else { + "make" + }; + if Command::new(make).arg("--version").output().is_err() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + d1 = { path = "d1" } + d2 = { path = "d2" } + d3 = { path = "d3" } + "#, + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#, + ) + .file("d2/src/lib.rs", "") + .file( + "d3/Cargo.toml", + r#" + [package] + name = "d3" + version = "0.0.1" + authors = [] + build = "../dbuild.rs" + "#, + ) + .file("d3/src/lib.rs", "") + .file( + "dbuild.rs", + r#" + use std::net::TcpStream; + use std::env; + use std::io::Read; + + fn main() { + let addr = env::var("ADDR").unwrap(); + let mut stream = TcpStream::connect(addr).unwrap(); + let mut v = Vec::new(); + stream.read_to_end(&mut v).unwrap(); + } + "#, + ) + .file( + "Makefile", + "\ +all: +\t+$(CARGO) build +", + ) + .build(); + + let l = TcpListener::bind("127.0.0.1:0").unwrap(); + let addr = l.local_addr().unwrap(); + + let child = thread::spawn(move || { + let a1 = l.accept().unwrap(); + let a2 = l.accept().unwrap(); + l.set_nonblocking(true).unwrap(); + + for _ in 0..1000 { + assert!(l.accept().is_err()); + thread::yield_now(); + } + + drop(a1); + l.set_nonblocking(false).unwrap(); + let a3 = l.accept().unwrap(); + + drop((a2, a3)); + }); + + p.process(make) + .env("CARGO", cargo_exe()) + .env("ADDR", addr.to_string()) + .arg("-j2") + .run(); + child.join().unwrap(); +} + +#[cargo_test] +fn jobserver_and_j() { + let make = if cfg!(windows) { + "mingw32-make" + } else { + "make" + }; + if Command::new(make).arg("--version").output().is_err() { + return; + } + + let p = project() + .file("src/lib.rs", "") + .file( + "Makefile", + "\ +all: +\t+$(CARGO) build -j2 +", + ) + .build(); + + p.process(make) + .env("CARGO", cargo_exe()) + .arg("-j2") + .with_stderr( + "\ +warning: a `-j` argument was passed to Cargo but Cargo is also configured \ +with an external jobserver in its environment, ignoring the `-j` parameter +[COMPILING] [..] +[FINISHED] [..] +", + ) + .run(); +} diff --git a/tests/testsuite/list_targets.rs b/tests/testsuite/list_targets.rs new file mode 100644 index 00000000000..9db79afb525 --- /dev/null +++ b/tests/testsuite/list_targets.rs @@ -0,0 +1,189 @@ +use crate::support::project; + +const EXAMPLE: u8 = 0x1; +const BIN: u8 = 0x2; +const TEST: u8 = 0x4; +const BENCH: u8 = 0x8; + +fn list_targets_test(command: &str, targets: u8) { + let full_project = project() + .file("examples/a.rs", "fn main() { }") + .file("examples/b.rs", "fn main() { }") + .file("benches/bench1.rs", "") + .file("benches/bench2.rs", "") + .file("tests/test1.rs", "") + .file("tests/test2.rs", "") + .file("src/main.rs", "fn main() { }") + .build(); + + if targets & EXAMPLE != 0 { + full_project + .cargo(&format!("{} --example", command)) + .with_stderr( + "\ +error: \"--example\" takes one argument. 
+Available examples: + a + b + +", + ) + .with_status(101) + .run(); + } + + if targets & BIN != 0 { + full_project + .cargo(&format!("{} --bin", command)) + .with_stderr( + "\ +error: \"--bin\" takes one argument. +Available binaries: + foo + +", + ) + .with_status(101) + .run(); + } + + if targets & BENCH != 0 { + full_project + .cargo(&format!("{} --bench", command)) + .with_stderr( + "\ +error: \"--bench\" takes one argument. +Available benches: + bench1 + bench2 + +", + ) + .with_status(101) + .run(); + } + + if targets & TEST != 0 { + full_project + .cargo(&format!("{} --test", command)) + .with_stderr( + "\ +error: \"--test\" takes one argument. +Available tests: + test1 + test2 + +", + ) + .with_status(101) + .run(); + } + + let empty_project = project().file("src/lib.rs", "").build(); + + if targets & EXAMPLE != 0 { + empty_project + .cargo(&format!("{} --example", command)) + .with_stderr( + "\ +error: \"--example\" takes one argument. +No examples available. + +", + ) + .with_status(101) + .run(); + } + + if targets & BIN != 0 { + empty_project + .cargo(&format!("{} --bin", command)) + .with_stderr( + "\ +error: \"--bin\" takes one argument. +No binaries available. + +", + ) + .with_status(101) + .run(); + } + + if targets & BENCH != 0 { + empty_project + .cargo(&format!("{} --bench", command)) + .with_stderr( + "\ +error: \"--bench\" takes one argument. +No benches available. + +", + ) + .with_status(101) + .run(); + } + + if targets & TEST != 0 { + empty_project + .cargo(&format!("{} --test", command)) + .with_stderr( + "\ +error: \"--test\" takes one argument. +No tests available. + +", + ) + .with_status(101) + .run(); + } +} + +#[cargo_test] +fn build_list_targets() { + list_targets_test("build", EXAMPLE | BIN | TEST | BENCH); +} + +#[cargo_test] +fn check_list_targets() { + list_targets_test("check", EXAMPLE | BIN | TEST | BENCH); +} + +#[cargo_test] +fn doc_list_targets() { + list_targets_test("doc", BIN); +} + +#[cargo_test] +fn fix_list_targets() { + list_targets_test("fix", EXAMPLE | BIN | TEST | BENCH); +} + +#[cargo_test] +fn run_list_targets() { + list_targets_test("run", EXAMPLE | BIN); +} + +#[cargo_test] +fn test_list_targets() { + list_targets_test("test", EXAMPLE | BIN | TEST | BENCH); +} + +#[cargo_test] +fn bench_list_targets() { + list_targets_test("bench", EXAMPLE | BIN | TEST | BENCH); +} + +#[cargo_test] +fn install_list_targets() { + list_targets_test("install", EXAMPLE | BIN); +} + +#[cargo_test] +fn rustdoc_list_targets() { + list_targets_test("rustdoc", EXAMPLE | BIN | TEST | BENCH); +} + +#[cargo_test] +fn rustc_list_targets() { + list_targets_test("rustc", EXAMPLE | BIN | TEST | BENCH); +} diff --git a/tests/testsuite/local_registry.rs b/tests/testsuite/local_registry.rs new file mode 100644 index 00000000000..7df486dd997 --- /dev/null +++ b/tests/testsuite/local_registry.rs @@ -0,0 +1,485 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use crate::support::paths::{self, CargoPathExt}; +use crate::support::registry::{registry_path, Package}; +use crate::support::{basic_manifest, project}; + +fn setup() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all( + br#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + local-registry = 'registry' + "# + )); +} + +#[cargo_test] +fn simple() { + setup(); + Package::new("bar", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn bar() 
{}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UNPACKING] bar v0.0.1 ([..]) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] [..] +", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); + p.cargo("test").run(); +} + +#[cargo_test] +fn depend_on_yanked() { + setup(); + Package::new("bar", "0.0.1").local(true).publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + // Run cargo to create lock file. + p.cargo("check").run(); + + registry_path().join("index").join("3").rm_rf(); + Package::new("bar", "0.0.1") + .local(true) + .yanked(true) + .publish(); + + p.cargo("check") + .with_stderr( + "\ +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn multiple_versions() { + setup(); + Package::new("bar", "0.0.1").local(true).publish(); + Package::new("bar", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UNPACKING] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] [..] +", + ) + .run(); + + Package::new("bar", "0.2.0") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + + p.cargo("update -v") + .with_stderr("[UPDATING] bar v0.1.0 -> v0.2.0") + .run(); +} + +#[cargo_test] +fn multiple_names() { + setup(); + Package::new("bar", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + Package::new("baz", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn baz() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + baz = "*" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + extern crate baz; + pub fn foo() { + bar::bar(); + baz::baz(); + } + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn interdependent() { + setup(); + Package::new("bar", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + Package::new("baz", "0.1.0") + .local(true) + .dep("bar", "*") + .file("src/lib.rs", "extern crate bar; pub fn baz() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + baz = "*" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + extern crate baz; + pub fn foo() { + bar::bar(); + baz::baz(); + } + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] bar v0.0.1 +[COMPILING] baz v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn path_dep_rewritten() { + setup(); + Package::new("bar", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + Package::new("baz", "0.1.0") + .local(true) + .dep("bar", "*") + .file( + "Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar", version = "*" } + "#, + ) + .file("src/lib.rs", "extern crate bar; pub fn baz() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + baz = "*" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + extern crate baz; + pub fn foo() { + bar::bar(); + baz::baz(); + } + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] bar v0.0.1 +[COMPILING] baz v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn invalid_dir_bad() { + setup(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-directory' + + [source.my-awesome-local-directory] + local-registry = '/path/to/nowhere' + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry `https://[..]` + +Caused by: + failed to update replaced source registry `https://[..]` + +Caused by: + local registry path is not a directory: [..]path[..]to[..]nowhere +", + ) + .run(); +} + +#[cargo_test] +fn different_directory_replacing_the_registry_is_bad() { + setup(); + + // Move our test's .cargo/config to a temporary location and publish a + // registry package we're going to use first. 
+ let config = paths::root().join(".cargo"); + let config_tmp = paths::root().join(".cargo-old"); + t!(fs::rename(&config, &config_tmp)); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + // Generate a lock file against the crates.io registry + Package::new("bar", "0.0.1").publish(); + p.cargo("build").run(); + + // Switch back to our directory source, and now that we're replacing + // crates.io make sure that this fails because we're replacing with a + // different checksum + config.rm_rf(); + t!(fs::rename(&config_tmp, &config)); + Package::new("bar", "0.0.1") + .file("src/lib.rs", "invalid") + .local(true) + .publish(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] checksum for `bar v0.0.1` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g., a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `bar v0.0.1` is the same as when the lockfile was generated + +", + ) + .run(); +} + +#[cargo_test] +fn crates_io_registry_url_is_optional() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all( + br#" + [source.crates-io] + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + local-registry = 'registry' + "# + )); + + Package::new("bar", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.0.1" + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UNPACKING] bar v0.0.1 ([..]) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] [..] +", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); + p.cargo("test").run(); +} diff --git a/tests/testsuite/lockfile_compat.rs b/tests/testsuite/lockfile_compat.rs new file mode 100644 index 00000000000..bc40866fede --- /dev/null +++ b/tests/testsuite/lockfile_compat.rs @@ -0,0 +1,633 @@ +use crate::support::git; +use crate::support::registry::Package; +use crate::support::{basic_manifest, lines_match, project}; + +#[cargo_test] +fn oldest_lockfile_still_works() { + let cargo_commands = vec!["build", "update"]; + for cargo_command in cargo_commands { + oldest_lockfile_still_works_with_command(cargo_command); + } +} + +fn oldest_lockfile_still_works_with_command(cargo_command: &str) { + Package::new("bar", "0.1.0").publish(); + + let expected_lockfile = r#"# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "foo" +version = "0.0.1" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[metadata] +"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "[..]" +"#; + + let old_lockfile = r#" +[root] +name = "foo" +version = "0.0.1" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("Cargo.lock", old_lockfile) + .build(); + + p.cargo(cargo_command).run(); + + let lock = p.read_lockfile(); + for (l, r) in expected_lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), expected_lockfile.lines().count()); +} + +#[cargo_test] +fn frozen_flag_preserves_old_lockfile() { + let cksum = Package::new("bar", "0.1.0").publish(); + + let old_lockfile = format!( + r#"[root] +name = "foo" +version = "0.0.1" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" +"#, + cksum, + ); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("Cargo.lock", &old_lockfile) + .build(); + + p.cargo("build --locked").run(); + + let lock = p.read_lockfile(); + for (l, r) in old_lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), old_lockfile.lines().count()); +} + +#[cargo_test] +fn totally_wild_checksums_works() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + r#" +[[package]] +name = "foo" +version = "0.0.1" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"checksum bar 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#, + ); + + let p = p.build(); + + p.cargo("build").run(); + + let lock = p.read_lockfile(); + assert!(lock.starts_with( + r#" +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[metadata]
+"#
+        .trim()
+    ));
+}
+
+#[cargo_test]
+fn wrong_checksum_is_an_error() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum"
+"#,
+        );
+
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+error: checksum for `bar v0.1.0` changed between lock files
+
+this could be indicative of a few possible errors:
+
+    * the lock file is corrupt
+    * a replacement source in use (e.g., a mirror) returned a different checksum
+    * the source itself may be corrupt in one way or another
+
+unable to verify that `bar v0.1.0` is the same as when the lockfile was generated
+
+",
+        )
+        .run();
+}
+
+// If the checksum is unlisted in the lock file (e.g., `<none>`) yet we can
+// calculate it (e.g., it's a registry dep), then we should in theory just fill
+// it in.
+#[cargo_test]
+fn unlisted_checksum_is_bad_if_we_calculate() {
+    Package::new("bar", "0.1.0").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1.0"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "Cargo.lock",
+            r#"
+[[package]]
+name = "foo"
+version = "0.0.1"
+dependencies = [
+ "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "bar"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[metadata]
+"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>"
+"#,
+        );
+    let p = p.build();
+
+    p.cargo("fetch")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+error: checksum for `bar v0.1.0` was not previously calculated, but a checksum \
+could now be calculated
+
+this could be indicative of a few possible situations:
+
+    * the source `[..]` did not previously support checksums,
+      but was replaced with one that does
+    * newer Cargo implementations know how to checksum this source, but this
+      older implementation does not
+    * the lock file is corrupt
+
+",
+        )
+        .run();
+}
+
+// If the checksum is listed in the lock file yet we cannot calculate it (e.g.,
+// Git dependencies as of today), then make sure we choke.
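+// (Registry checksums come from the index and `.crate` files; a git checkout
+// has no equivalent digest for Cargo to compute today.)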
+#[cargo_test] +fn listed_checksum_bad_if_we_cannot_compute() { + let git = git::new("bar", |p| { + p.file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .file( + "Cargo.lock", + &format!( + r#" +[[package]] +name = "foo" +version = "0.0.1" +dependencies = [ + "bar 0.1.0 (git+{0})" +] + +[[package]] +name = "bar" +version = "0.1.0" +source = "git+{0}" + +[metadata] +"checksum bar 0.1.0 (git+{0})" = "checksum" +"#, + git.url() + ), + ); + + let p = p.build(); + + p.cargo("fetch") + .with_status(101) + .with_stderr( + "\ +[UPDATING] git repository `[..]` +error: checksum for `bar v0.1.0 ([..])` could not be calculated, but a \ +checksum is listed in the existing lock file[..] + +this could be indicative of a few possible situations: + + * the source `[..]` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `bar v0.1.0 ([..])` is the same as when the lockfile was generated + +", + ) + .run(); +} + +#[cargo_test] +fn current_lockfile_format() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").run(); + + let actual = p.read_lockfile(); + + let expected = "\ +# This file is automatically @generated by Cargo.\n# It is not intended for manual editing. +[[package]] +name = \"bar\" +version = \"0.1.0\" +source = \"registry+https://github.com/rust-lang/crates.io-index\" + +[[package]] +name = \"foo\" +version = \"0.0.1\" +dependencies = [ + \"bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\", +] + +[metadata] +\"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)\" = \"[..]\""; + + for (l, r) in expected.lines().zip(actual.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(actual.lines().count(), expected.lines().count()); +} + +#[cargo_test] +fn lockfile_without_root() { + Package::new("bar", "0.1.0").publish(); + + let lockfile = r#" +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "foo" +version = "0.0.1" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] +"#; + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("Cargo.lock", lockfile); + + let p = p.build(); + + p.cargo("build").run(); + + let lock = p.read_lockfile(); + assert!(lock.starts_with(lockfile.trim())); +} + +#[cargo_test] +fn locked_correct_error() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", ""); + let p = p.build(); + + p.cargo("build --locked") + .with_status(101) + .with_stderr( + "\ +[UPDATING] `[..]` index +error: the lock file [CWD]/Cargo.lock needs to be updated but --locked was passed to prevent this +", + ) + .run(); +} + +#[cargo_test] +fn v2_format_preserved() { + let cksum = Package::new("bar", "0.1.0").publish(); + + let lockfile = format!( + r#"# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "{}" + +[[package]] +name = "foo" +version = "0.0.1" +dependencies = [ + "bar", +] +"#, + cksum + ); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("Cargo.lock", &lockfile) + .build(); + + p.cargo("fetch").run(); + + let lock = p.read_lockfile(); + for (l, r) in lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), lockfile.lines().count()); +} + +#[cargo_test] +fn v2_path_and_crates_io() { + let cksum010 = Package::new("a", "0.1.0").publish(); + let cksum020 = Package::new("a", "0.2.0").publish(); + + let lockfile = format!( + r#"# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "a" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "{}" + +[[package]] +name = "a" +version = "0.2.0" + +[[package]] +name = "a" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "{}" + +[[package]] +name = "foo" +version = "0.0.1" +dependencies = [ + "a 0.1.0", + "a 0.2.0", + "a 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] +"#, + cksum010, cksum020, + ); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = 'a' } + b = { version = "0.1", package = 'a' } + c = { version = "0.2", package = 'a' } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.2.0" + "#, + ) + .file("a/src/lib.rs", "") + .file("Cargo.lock", &lockfile) + .build(); + + p.cargo("fetch").run(); + p.cargo("fetch").run(); + + let lock = p.read_lockfile(); + for (l, r) in lockfile.lines().zip(lock.lines()) { + assert!(lines_match(l, r), "Lines differ:\n{}\n\n{}", l, r); + } + + assert_eq!(lock.lines().count(), lockfile.lines().count()); +} diff --git a/tests/testsuite/login.rs b/tests/testsuite/login.rs new file mode 100644 index 00000000000..a9476159e44 --- /dev/null +++ b/tests/testsuite/login.rs @@ -0,0 +1,140 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use crate::support::cargo_process; +use crate::support::install::cargo_home; +use crate::support::registry::{self, registry_url}; +use cargo::core::Shell; +use cargo::util::config::Config; +use toml; + +const TOKEN: &str = "test-token"; +const ORIGINAL_TOKEN: &str = "api-token"; + +fn setup_new_credentials() { + let config = cargo_home().join("credentials"); + t!(fs::create_dir_all(config.parent().unwrap())); + t!(t!(File::create(&config)) + .write_all(format!(r#"token = "{token}""#, token = ORIGINAL_TOKEN).as_bytes())); +} + +fn check_token(expected_token: &str, registry: Option<&str>) -> bool { + let credentials = cargo_home().join("credentials"); + assert!(credentials.is_file()); + + let mut contents = String::new(); + File::open(&credentials) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + let toml: toml::Value = contents.parse().unwrap(); + + let token = match (registry, toml) { + // A registry has been provided, so check that the token exists in a + // table for the registry. + (Some(registry), toml::Value::Table(table)) => table + .get("registries") + .and_then(|registries_table| registries_table.get(registry)) + .and_then(|registry_table| match registry_table.get("token") { + Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()), + _ => None, + }), + // There is no registry provided, so check the global token instead. 
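+        // (`cargo login` without `--registry` stores the token under the
+        // top-level `[registry]` table of the credentials file.)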
+ (None, toml::Value::Table(table)) => table + .get("registry") + .and_then(|registry_table| registry_table.get("token")) + .and_then(|v| match v { + toml::Value::String(ref token) => Some(token.as_str().to_string()), + _ => None, + }), + _ => None, + }; + + if let Some(token_val) = token { + token_val == expected_token + } else { + false + } +} + +#[cargo_test] +fn login_with_old_credentials() { + registry::init(); + + cargo_process("login --host") + .arg(registry_url().to_string()) + .arg(TOKEN) + .run(); + + // Ensure that we get the new token for the registry + assert!(check_token(TOKEN, None)); +} + +#[cargo_test] +fn login_with_new_credentials() { + registry::init(); + setup_new_credentials(); + + cargo_process("login --host") + .arg(registry_url().to_string()) + .arg(TOKEN) + .run(); + + // Ensure that we get the new token for the registry + assert!(check_token(TOKEN, None)); +} + +#[cargo_test] +fn login_with_old_and_new_credentials() { + setup_new_credentials(); + login_with_old_credentials(); +} + +#[cargo_test] +fn login_without_credentials() { + registry::init(); + cargo_process("login --host") + .arg(registry_url().to_string()) + .arg(TOKEN) + .run(); + + // Ensure that we get the new token for the registry + assert!(check_token(TOKEN, None)); +} + +#[cargo_test] +fn new_credentials_is_used_instead_old() { + registry::init(); + setup_new_credentials(); + + cargo_process("login --host") + .arg(registry_url().to_string()) + .arg(TOKEN) + .run(); + + let config = Config::new(Shell::new(), cargo_home(), cargo_home()); + + let token = config.get_string("registry.token").unwrap().map(|p| p.val); + assert_eq!(token.unwrap(), TOKEN); +} + +#[cargo_test] +fn registry_credentials() { + registry::init(); + setup_new_credentials(); + + let reg = "alternative"; + + cargo_process("login --registry") + .arg(reg) + .arg(TOKEN) + .arg("-Zunstable-options") + .masquerade_as_nightly_cargo() + .run(); + + // Ensure that we have not updated the default token + assert!(check_token(ORIGINAL_TOKEN, None)); + + // Also ensure that we get the new token for the registry + assert!(check_token(TOKEN, Some(reg))); +} diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs new file mode 100644 index 00000000000..a8a2d7cc309 --- /dev/null +++ b/tests/testsuite/main.rs @@ -0,0 +1,107 @@ +#![warn(rust_2018_idioms)] // while we're getting used to 2018 +#![cfg_attr(feature = "deny-warnings", deny(warnings))] +#![allow(clippy::blacklisted_name)] +#![allow(clippy::explicit_iter_loop)] +#![allow(clippy::redundant_closure)] +#![warn(clippy::needless_borrow)] +#![warn(clippy::redundant_clone)] + +#[macro_use] +extern crate cargo_test_macro; + +#[macro_use] +mod support; + +mod alt_registry; +mod bad_config; +mod bad_manifest_path; +mod bench; +mod build; +mod build_auth; +mod build_lib; +mod build_plan; +mod build_script; +mod build_script_env; +mod cache_messages; +mod cargo_alias_config; +mod cargo_command; +mod cargo_features; +mod cfg; +mod cfg_features; +mod check; +mod clean; +mod clippy; +mod collisions; +mod concurrent; +mod config; +mod corrupt_git; +mod cross_compile; +mod cross_publish; +mod custom_target; +mod death; +mod dep_info; +mod directory; +mod doc; +mod edition; +mod features; +mod fetch; +mod fix; +mod freshness; +mod generate_lockfile; +mod git; +mod init; +mod install; +mod install_upgrade; +mod jobserver; +mod list_targets; +mod local_registry; +mod lockfile_compat; +mod login; +mod member_errors; +mod metabuild; +mod metadata; +mod net_config; +mod new; +mod offline; +mod out_dir; 
+mod overrides; +mod package; +mod patch; +mod path; +mod plugins; +mod proc_macro; +mod profile_config; +mod profile_overrides; +mod profile_targets; +mod profiles; +mod pub_priv; +mod publish; +mod publish_lockfile; +mod read_manifest; +mod registry; +mod rename_deps; +mod required_features; +mod resolve; +mod run; +mod rustc; +mod rustc_info_cache; +mod rustdoc; +mod rustdocflags; +mod rustflags; +mod search; +mod shell_quoting; +mod small_fd_limits; +mod test; +mod tool_paths; +mod update; +mod vendor; +mod verify_project; +mod version; +mod warn_on_failure; +mod workspaces; + +#[cargo_test] +fn aaa_trigger_cross_compile_disabled_check() { + // This triggers the cross compile disabled check to run ASAP, see #5141 + support::cross_compile::disabled(); +} diff --git a/tests/testsuite/member_errors.rs b/tests/testsuite/member_errors.rs new file mode 100644 index 00000000000..a4d17d8f1c7 --- /dev/null +++ b/tests/testsuite/member_errors.rs @@ -0,0 +1,158 @@ +use cargo::core::resolver::ResolveError; +use cargo::core::{compiler::CompileMode, Shell, Workspace}; +use cargo::ops::{self, CompileOptions}; +use cargo::util::{config::Config, errors::ManifestError}; + +use crate::support::install::cargo_home; +use crate::support::project; +use crate::support::registry; + +/// Tests inclusion of a `ManifestError` pointing to a member manifest +/// when that manifest fails to deserialize. +#[cargo_test] +fn toml_deserialize_manifest_error() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foobar == "0.55" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + let root_manifest_path = p.root().join("Cargo.toml"); + let member_manifest_path = p.root().join("bar").join("Cargo.toml"); + + let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err(); + eprintln!("{:?}", error); + + let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError"); + assert_eq!(manifest_err.manifest_path(), &root_manifest_path); + + let causes: Vec<_> = manifest_err.manifest_causes().collect(); + assert_eq!(causes.len(), 1, "{:?}", causes); + assert_eq!(causes[0].manifest_path(), &member_manifest_path); +} + +/// Tests inclusion of a `ManifestError` pointing to a member manifest +/// when that manifest has an invalid dependency path. 
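+/// The expected chain is: root manifest, then the member manifest, then
+/// the missing `bar/nosuch/Cargo.toml` that its dependency points at;
+/// the assertions below walk the causes in that order.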
+#[cargo_test] +fn member_manifest_path_io_error() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foobar = { path = "nosuch" } + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + let root_manifest_path = p.root().join("Cargo.toml"); + let member_manifest_path = p.root().join("bar").join("Cargo.toml"); + let missing_manifest_path = p.root().join("bar").join("nosuch").join("Cargo.toml"); + + let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err(); + eprintln!("{:?}", error); + + let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError"); + assert_eq!(manifest_err.manifest_path(), &root_manifest_path); + + let causes: Vec<_> = manifest_err.manifest_causes().collect(); + assert_eq!(causes.len(), 2, "{:?}", causes); + assert_eq!(causes[0].manifest_path(), &member_manifest_path); + assert_eq!(causes[1].manifest_path(), &missing_manifest_path); +} + +/// Tests dependency version errors provide which package failed via a `ResolveError`. +#[cargo_test] +fn member_manifest_version_error() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + i-dont-exist = "0.55" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + // Prevent this test from accessing the network by setting up .cargo/config. 
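+ // `registry::init()` points crates.io at the local test registry via
+ // source replacement, so resolving `i-dont-exist` fails without any
+ // network access.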
+ registry::init(); + let config = Config::new(Shell::new(), cargo_home(), cargo_home()); + let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap(); + let compile_options = CompileOptions::new(&config, CompileMode::Build).unwrap(); + let member_bar = ws.members().find(|m| &*m.name() == "bar").unwrap(); + + let error = ops::compile(&ws, &compile_options).map(|_| ()).unwrap_err(); + eprintln!("{:?}", error); + + let resolve_err: &ResolveError = error.downcast_ref().expect("Not a ResolveError"); + let package_path = resolve_err.package_path(); + assert_eq!(package_path.len(), 1, "package_path: {:?}", package_path); + assert_eq!(package_path[0], member_bar.package_id()); +} diff --git a/tests/testsuite/metabuild.rs b/tests/testsuite/metabuild.rs new file mode 100644 index 00000000000..d060d407aed --- /dev/null +++ b/tests/testsuite/metabuild.rs @@ -0,0 +1,758 @@ +use crate::support::{ + basic_lib_manifest, basic_manifest, is_coarse_mtime, project, registry::Package, rustc_host, + Project, +}; +use serde_json; +use std::str; + +#[cargo_test] +fn metabuild_gated() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + metabuild = ["mb"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + feature `metabuild` is required + +consider adding `cargo-features = [\"metabuild\"]` to the manifest +", + ) + .run(); +} + +fn basic_project() -> Project { + project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + metabuild = ["mb", "mb-other"] + + [build-dependencies] + mb = {path="mb"} + mb-other = {path="mb-other"} + "#, + ) + .file("src/lib.rs", "") + .file("mb/Cargo.toml", &basic_lib_manifest("mb")) + .file( + "mb/src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb"); }"#, + ) + .file( + "mb-other/Cargo.toml", + r#" + [package] + name = "mb-other" + version = "0.0.1" + "#, + ) + .file( + "mb-other/src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb-other"); }"#, + ) + .build() +} + +#[cargo_test] +fn metabuild_basic() { + let p = basic_project(); + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[foo 0.0.1] Hello mb") + .with_stdout_contains("[foo 0.0.1] Hello mb-other") + .run(); +} + +#[cargo_test] +fn metabuild_error_both() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + metabuild = "mb" + + [build-dependencies] + mb = {path="mb"} + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", r#"fn main() {}"#) + .file("mb/Cargo.toml", &basic_lib_manifest("mb")) + .file( + "mb/src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb"); }"#, + ) + .build(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains( + "\ +error: failed to parse manifest at [..] + +Caused by: + cannot specify both `metabuild` and `build` +", + ) + .run(); +} + +#[cargo_test] +fn metabuild_missing_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + metabuild = "mb" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains( + "\ +error: failed to parse manifest at [..] 
+ +Caused by: + metabuild package `mb` must be specified in `build-dependencies`", + ) + .run(); +} + +#[cargo_test] +fn metabuild_optional_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + metabuild = "mb" + + [build-dependencies] + mb = {path="mb", optional=true} + "#, + ) + .file("src/lib.rs", "") + .file("mb/Cargo.toml", &basic_lib_manifest("mb")) + .file( + "mb/src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb"); }"#, + ) + .build(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_stdout_does_not_contain("[foo 0.0.1] Hello mb") + .run(); + + p.cargo("build -vv --features mb") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[foo 0.0.1] Hello mb") + .run(); +} + +#[cargo_test] +fn metabuild_lib_name() { + // Test when setting `name` on [lib]. + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + metabuild = "mb" + + [build-dependencies] + mb = {path="mb"} + "#, + ) + .file("src/lib.rs", "") + .file( + "mb/Cargo.toml", + r#" + [package] + name = "mb" + version = "0.0.1" + [lib] + name = "other" + "#, + ) + .file( + "mb/src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb"); }"#, + ) + .build(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[foo 0.0.1] Hello mb") + .run(); +} + +#[cargo_test] +fn metabuild_fresh() { + if is_coarse_mtime() { + // This test doesn't work on coarse mtimes very well. Because the + // metabuild script is created at build time, its mtime is almost + // always equal to the mtime of the output. The second call to `build` + // will then think it needs to be rebuilt when it should be fresh. + return; + } + + // Check that rebuild is fresh. + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + metabuild = "mb" + + [build-dependencies] + mb = {path="mb"} + "#, + ) + .file("src/lib.rs", "") + .file("mb/Cargo.toml", &basic_lib_manifest("mb")) + .file( + "mb/src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb"); }"#, + ) + .build(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[foo 0.0.1] Hello mb") + .run(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_stdout_does_not_contain("[foo 0.0.1] Hello mb") + .with_stderr( + "\ +[FRESH] mb [..] +[FRESH] foo [..] +[FINISHED] dev [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn metabuild_links() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + links = "cat" + metabuild = "mb" + + [build-dependencies] + mb = {path="mb"} + "#, + ) + .file("src/lib.rs", "") + .file("mb/Cargo.toml", &basic_lib_manifest("mb")) + .file( + "mb/src/lib.rs", + r#"pub fn metabuild() { + assert_eq!(std::env::var("CARGO_MANIFEST_LINKS"), + Ok("cat".to_string())); + println!("Hello mb"); + }"#, + ) + .build(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[foo 0.0.1] Hello mb") + .run(); +} + +#[cargo_test] +fn metabuild_override() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "foo" + version = "0.0.1" + links = "cat" + metabuild = "mb" + + [build-dependencies] + mb = {path="mb"} + "#, + ) + .file("src/lib.rs", "") + .file("mb/Cargo.toml", &basic_lib_manifest("mb")) + .file( + "mb/src/lib.rs", + r#"pub fn metabuild() { panic!("should not run"); }"#, + ) + .file( + ".cargo/config", + &format!( + r#" + [target.{}.cat] + rustc-link-lib = ["a"] + "#, + rustc_host() + ), + ) + .build(); + + p.cargo("build -vv").masquerade_as_nightly_cargo().run(); +} + +#[cargo_test] +fn metabuild_workspace() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["member1", "member2"] + "#, + ) + .file( + "member1/Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "member1" + version = "0.0.1" + metabuild = ["mb1", "mb2"] + + [build-dependencies] + mb1 = {path="../../mb1"} + mb2 = {path="../../mb2"} + "#, + ) + .file("member1/src/lib.rs", "") + .file( + "member2/Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "member2" + version = "0.0.1" + metabuild = ["mb1"] + + [build-dependencies] + mb1 = {path="../../mb1"} + "#, + ) + .file("member2/src/lib.rs", "") + .build(); + + project() + .at("mb1") + .file("Cargo.toml", &basic_lib_manifest("mb1")) + .file( + "src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, + ) + .build(); + + project() + .at("mb2") + .file("Cargo.toml", &basic_lib_manifest("mb2")) + .file( + "src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, + ) + .build(); + + p.cargo("build -vv --all") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1") + .with_stdout_contains("[member1 0.0.1] Hello mb2 [..]member1") + .with_stdout_contains("[member2 0.0.1] Hello mb1 [..]member2") + .with_stdout_does_not_contain("[member2 0.0.1] Hello mb2 [..]member2") + .run(); +} + +#[cargo_test] +fn metabuild_metadata() { + // The metabuild Target is filtered out of the `metadata` results. 
+ let p = basic_project(); + + let output = p + .cargo("metadata --format-version=1") + .masquerade_as_nightly_cargo() + .exec_with_output() + .expect("cargo metadata failed"); + let stdout = str::from_utf8(&output.stdout).unwrap(); + let meta: serde_json::Value = serde_json::from_str(stdout).expect("failed to parse json"); + let mb_info: Vec<&str> = meta["packages"] + .as_array() + .unwrap() + .iter() + .find(|p| p["name"].as_str().unwrap() == "foo") + .unwrap()["metabuild"] + .as_array() + .unwrap() + .iter() + .map(|s| s.as_str().unwrap()) + .collect(); + assert_eq!(mb_info, ["mb", "mb-other"]); +} + +#[cargo_test] +fn metabuild_build_plan() { + let p = basic_project(); + + p.cargo("build --build-plan -Zunstable-options") + .masquerade_as_nightly_cargo() + .with_json( + r#" +{ + "invocations": [ + { + "package_name": "mb", + "package_version": "0.5.0", + "target_kind": ["lib"], + "compile_mode": "build", + "kind": "Host", + "deps": [], + "outputs": [ + "[..]/target/debug/deps/libmb-[..].rlib", + "[..]/target/debug/deps/libmb-[..].rmeta" + ], + "links": {}, + "program": "rustc", + "args": "{...}", + "env": "{...}", + "cwd": "[..]" + }, + { + "package_name": "mb-other", + "package_version": "0.0.1", + "target_kind": ["lib"], + "compile_mode": "build", + "kind": "Host", + "deps": [], + "outputs": [ + "[..]/target/debug/deps/libmb_other-[..].rlib", + "[..]/target/debug/deps/libmb_other-[..].rmeta" + ], + "links": {}, + "program": "rustc", + "args": "{...}", + "env": "{...}", + "cwd": "[..]" + }, + { + "package_name": "foo", + "package_version": "0.0.1", + "target_kind": ["custom-build"], + "compile_mode": "build", + "kind": "Host", + "deps": [0, 1], + "outputs": ["[..]/target/debug/build/foo-[..]/metabuild_foo-[..][EXE]"], + "links": "{...}", + "program": "rustc", + "args": "{...}", + "env": "{...}", + "cwd": "[..]" + }, + { + "package_name": "foo", + "package_version": "0.0.1", + "target_kind": ["custom-build"], + "compile_mode": "run-custom-build", + "kind": "Host", + "deps": [2], + "outputs": [], + "links": {}, + "program": "[..]/foo/target/debug/build/foo-[..]/metabuild-foo", + "args": [], + "env": "{...}", + "cwd": "[..]" + }, + { + "package_name": "foo", + "package_version": "0.0.1", + "target_kind": ["lib"], + "compile_mode": "build", + "kind": "Host", + "deps": [3], + "outputs": [ + "[..]/foo/target/debug/deps/libfoo-[..].rlib", + "[..]/foo/target/debug/deps/libfoo-[..].rmeta" + ], + "links": "{...}", + "program": "rustc", + "args": "{...}", + "env": "{...}", + "cwd": "[..]" + } + ], + "inputs": [ + "[..]/foo/Cargo.toml", + "[..]/foo/mb/Cargo.toml", + "[..]/foo/mb-other/Cargo.toml" + ] +} +"#, + ) + .run(); + + assert_eq!(p.glob("target/.metabuild/metabuild-foo-*.rs").count(), 1); +} + +#[cargo_test] +fn metabuild_two_versions() { + // Two versions of a metabuild dep with the same name. 
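+ // Each member gets its own generated shim under `target/.metabuild/`,
+ // which is what the glob-count assertion at the end verifies.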
+ let p = project() + .at("ws") + .file( + "Cargo.toml", + r#" + [workspace] + members = ["member1", "member2"] + "#, + ) + .file( + "member1/Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "member1" + version = "0.0.1" + metabuild = ["mb"] + + [build-dependencies] + mb = {path="../../mb1"} + "#, + ) + .file("member1/src/lib.rs", "") + .file( + "member2/Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "member2" + version = "0.0.1" + metabuild = ["mb"] + + [build-dependencies] + mb = {path="../../mb2"} + "#, + ) + .file("member2/src/lib.rs", "") + .build(); + + project().at("mb1") + .file("Cargo.toml", r#" + [package] + name = "mb" + version = "0.0.1" + "#) + .file( + "src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, + ) + .build(); + + project().at("mb2") + .file("Cargo.toml", r#" + [package] + name = "mb" + version = "0.0.2" + "#) + .file( + "src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, + ) + .build(); + + p.cargo("build -vv --all") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1") + .with_stdout_contains("[member2 0.0.1] Hello mb2 [..]member2") + .run(); + + assert_eq!( + p.glob("target/.metabuild/metabuild-member?-*.rs").count(), + 2 + ); +} + +#[cargo_test] +fn metabuild_external_dependency() { + Package::new("mb", "1.0.0") + .file("Cargo.toml", &basic_manifest("mb", "1.0.0")) + .file( + "src/lib.rs", + r#"pub fn metabuild() { println!("Hello mb"); }"#, + ) + .publish(); + Package::new("dep", "1.0.0") + .file( + "Cargo.toml", + r#" + cargo-features = ["metabuild"] + [package] + name = "dep" + version = "1.0.0" + metabuild = ["mb"] + + [build-dependencies] + mb = "1.0" + "#, + ) + .file("src/lib.rs", "") + .build_dep("mb", "1.0.0") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + [dependencies] + dep = "1.0" + "#, + ) + .file("src/lib.rs", "extern crate dep;") + .build(); + + p.cargo("build -vv") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[dep 1.0.0] Hello mb") + .run(); + + assert_eq!(p.glob("target/.metabuild/metabuild-dep-*.rs").count(), 1); +} + +#[cargo_test] +fn metabuild_json_artifact() { + let p = basic_project(); + p.cargo("build --message-format=json") + .masquerade_as_nightly_cargo() + .with_json_contains_unordered( + r#" +{ + "executable": null, + "features": [], + "filenames": [ + "[..]/foo/target/debug/build/foo-[..]/metabuild-foo[EXE]" + ], + "fresh": false, + "package_id": "foo [..]", + "profile": "{...}", + "reason": "compiler-artifact", + "target": { + "crate_types": [ + "bin" + ], + "doctest": false, + "edition": "2018", + "kind": [ + "custom-build" + ], + "name": "metabuild-foo", + "src_path": "[..]/foo/target/.metabuild/metabuild-foo-[..].rs" + } +} + +{ + "cfgs": [], + "env": [], + "linked_libs": [], + "linked_paths": [], + "package_id": "foo [..]", + "reason": "build-script-executed" +} +"#, + ) + .run(); +} + +#[cargo_test] +fn metabuild_failed_build_json() { + let p = basic_project(); + // Modify the metabuild dep so that it fails to compile. 
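+ // Emptying `mb/src/lib.rs` removes its `metabuild` function, so the
+ // generated shim's call into `mb` no longer resolves and rustc reports
+ // the "cannot find function" error asserted in the JSON below.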
+ p.change_file("mb/src/lib.rs", ""); + p.cargo("build --message-format=json") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_json_contains_unordered( + r#" +{ + "message": { + "children": "{...}", + "code": "{...}", + "level": "error", + "message": "cannot find function `metabuild` in module `mb`", + "rendered": "[..]", + "spans": "{...}" + }, + "package_id": "foo [..]", + "reason": "compiler-message", + "target": { + "crate_types": [ + "bin" + ], + "doctest": false, + "edition": "2018", + "kind": [ + "custom-build" + ], + "name": "metabuild-foo", + "src_path": null + } +} +"#, + ) + .run(); +} diff --git a/tests/testsuite/metadata.rs b/tests/testsuite/metadata.rs new file mode 100644 index 00000000000..33f67344ac4 --- /dev/null +++ b/tests/testsuite/metadata.rs @@ -0,0 +1,1726 @@ +use crate::support::registry::Package; +use crate::support::{basic_bin_manifest, basic_lib_manifest, main_file, project}; + +#[cargo_test] +fn cargo_metadata_simple() { + let p = project() + .file("src/foo.rs", "") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .build(); + + p.cargo("metadata") + .with_json( + r#" + { + "packages": [ + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + "name": "foo", + "version": "0.5.0", + "id": "foo[..]", + "keywords": [], + "source": null, + "dependencies": [], + "edition": "2015", + "license": null, + "license_file": null, + "links": null, + "description": null, + "readme": null, + "repository": null, + "targets": [ + { + "kind": [ + "bin" + ], + "crate_types": [ + "bin" + ], + "doctest": false, + "edition": "2015", + "name": "foo", + "src_path": "[..]/foo/src/foo.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml", + "metadata": null + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "foo 0.5.0 (path+file:[..]foo)" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_warns_on_implicit_version() { + let p = project() + .file("src/foo.rs", "") + .file("Cargo.toml", &basic_bin_manifest("foo")) + .build(); + + p.cargo("metadata").with_stderr("[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems").run(); + + p.cargo("metadata --format-version 1").with_stderr("").run(); +} + +#[cargo_test] +fn library_with_several_crate_types() { + let p = project() + .file("src/lib.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.5.0" + +[lib] +crate-type = ["lib", "staticlib"] + "#, + ) + .build(); + + p.cargo("metadata") + .with_json( + r#" + { + "packages": [ + { + "authors": [], + "categories": [], + "name": "foo", + "readme": null, + "repository": null, + "version": "0.5.0", + "id": "foo[..]", + "keywords": [], + "source": null, + "dependencies": [], + "edition": "2015", + "license": null, + "license_file": null, + "links": null, + "description": null, + "targets": [ + { + "kind": [ + "lib", + "staticlib" + ], + "crate_types": [ + "lib", + "staticlib" + ], + "doctest": true, + "edition": "2015", + "name": "foo", + "src_path": "[..]/foo/src/lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml", + "metadata": null + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "foo 0.5.0 (path+file:[..]foo)" 
+ } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn library_with_features() { + let p = project() + .file("src/lib.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.5.0" + +[features] +default = ["default_feat"] +default_feat = [] +optional_feat = [] + "#, + ) + .build(); + + p.cargo("metadata") + .with_json( + r#" + { + "packages": [ + { + "authors": [], + "categories": [], + "name": "foo", + "readme": null, + "repository": null, + "version": "0.5.0", + "id": "foo[..]", + "keywords": [], + "source": null, + "dependencies": [], + "edition": "2015", + "license": null, + "license_file": null, + "links": null, + "description": null, + "targets": [ + { + "kind": [ + "lib" + ], + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "name": "foo", + "src_path": "[..]/foo/src/lib.rs" + } + ], + "features": { + "default": [ + null, + [ + "default_feat" + ] + ], + "default_feat": [ + null, + [] + ], + "optional_feat": [ + null, + [] + ] + }, + "manifest_path": "[..]Cargo.toml", + "metadata": null + } + ], + "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [ + "default", + "default_feat" + ], + "id": "foo 0.5.0 (path+file:[..]foo)" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_with_deps_and_version() { + let p = project() + .file("src/foo.rs", "") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + license = "MIT" + description = "foo" + + [[bin]] + name = "foo" + + [dependencies] + bar = "*" + [dev-dependencies] + foobar = "*" + "#, + ) + .build(); + Package::new("baz", "0.0.1").publish(); + Package::new("foobar", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish(); + + p.cargo("metadata -q --format-version 1") + .with_json( + r#" + { + "packages": [ + { + "authors": [], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2015", + "features": {}, + "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]Cargo.toml", + "metadata": null, + "name": "baz", + "readme": null, + "repository": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "baz", + "src_path": "[..]src/lib.rs" + } + ], + "version": "0.0.1" + }, + { + "authors": [], + "categories": [], + "dependencies": [ + { + "features": [], + "kind": null, + "name": "bar", + "optional": false, + "registry": null, + "rename": null, + "req": "*", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "target": null, + "uses_default_features": true + }, + { + "features": [], + "kind": "dev", + "name": "foobar", + "optional": false, + "registry": null, + "rename": null, + "req": "*", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "target": null, + "uses_default_features": true + } + ], + "description": "foo", + "edition": "2015", + "features": {}, + "id": "foo 0.5.0 (path+file:[..]foo)", + "keywords": [], + "license": "MIT", + 
"license_file": null, + "links": null, + "manifest_path": "[..]Cargo.toml", + "metadata": null, + "name": "foo", + "readme": null, + "repository": null, + "source": null, + "targets": [ + { + "crate_types": [ + "bin" + ], + "doctest": false, + "edition": "2015", + "kind": [ + "bin" + ], + "name": "foo", + "src_path": "[..]src/foo.rs" + } + ], + "version": "0.5.0" + }, + { + "authors": [], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2015", + "features": {}, + "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]Cargo.toml", + "metadata": null, + "name": "foobar", + "readme": null, + "repository": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "foobar", + "src_path": "[..]src/lib.rs" + } + ], + "version": "0.0.1" + }, + { + "authors": [], + "categories": [], + "dependencies": [ + { + "features": [], + "kind": null, + "name": "baz", + "optional": false, + "registry": null, + "rename": null, + "req": "^0.0.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "target": null, + "uses_default_features": true + } + ], + "description": null, + "edition": "2015", + "features": {}, + "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]Cargo.toml", + "metadata": null, + "name": "bar", + "readme": null, + "repository": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "bar", + "src_path": "[..]src/lib.rs" + } + ], + "version": "0.0.1" + } + ], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + }, + { + "dependencies": [], + "deps": [], + "features": [], + "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + }, + { + "dependencies": [ + "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + ], + "deps": [ + { + "name": "bar", + "pkg": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + }, + { + "name": "foobar", + "pkg": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + } + ], + "features": [], + "id": "foo 0.5.0 (path+file:[..]foo)" + }, + { + "dependencies": [ + "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + ], + "deps": [ + { + "name": "baz", + "pkg": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + } + ], + "features": [], + "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" + } + ], + "root": "foo 0.5.0 (path+file:[..]foo)" + }, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_members": [ + "foo 0.5.0 (path+file:[..]foo)" + ], + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn example() { + let p = project() + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.1.0" + +[[example]] +name = "ex" + "#, + ) + .build(); + + p.cargo("metadata") + .with_json( 
+ r#" + { + "packages": [ + { + "authors": [], + "categories": [], + "name": "foo", + "readme": null, + "repository": null, + "version": "0.1.0", + "id": "foo[..]", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "source": null, + "dependencies": [], + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "doctest": true, + "edition": "2015", + "name": "foo", + "src_path": "[..]/foo/src/lib.rs" + }, + { + "kind": [ "example" ], + "crate_types": [ "bin" ], + "doctest": false, + "edition": "2015", + "name": "ex", + "src_path": "[..]/foo/examples/ex.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml", + "metadata": null + } + ], + "workspace_members": [ + "foo 0.1.0 (path+file:[..]foo)" + ], + "resolve": { + "root": "foo 0.1.0 (path+file://[..]foo)", + "nodes": [ + { + "id": "foo 0.1.0 (path+file:[..]foo)", + "features": [], + "dependencies": [], + "deps": [] + } + ] + }, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn example_lib() { + let p = project() + .file("src/lib.rs", "") + .file("examples/ex.rs", "") + .file( + "Cargo.toml", + r#" +[package] +name = "foo" +version = "0.1.0" + +[[example]] +name = "ex" +crate-type = ["rlib", "dylib"] + "#, + ) + .build(); + + p.cargo("metadata") + .with_json( + r#" + { + "packages": [ + { + "authors": [], + "categories": [], + "name": "foo", + "readme": null, + "repository": null, + "version": "0.1.0", + "id": "foo[..]", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "source": null, + "dependencies": [], + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "doctest": true, + "edition": "2015", + "name": "foo", + "src_path": "[..]/foo/src/lib.rs" + }, + { + "kind": [ "example" ], + "crate_types": [ "rlib", "dylib" ], + "doctest": false, + "edition": "2015", + "name": "ex", + "src_path": "[..]/foo/examples/ex.rs" + } + ], + "features": {}, + "manifest_path": "[..]Cargo.toml", + "metadata": null + } + ], + "workspace_members": [ + "foo 0.1.0 (path+file:[..]foo)" + ], + "resolve": { + "root": "foo 0.1.0 (path+file://[..]foo)", + "nodes": [ + { + "id": "foo 0.1.0 (path+file:[..]foo)", + "features": [], + "dependencies": [], + "deps": [] + } + ] + }, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn workspace_metadata() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/lib.rs", "") + .build(); + + p.cargo("metadata") + .with_json( + r#" + { + "packages": [ + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + "name": "bar", + "version": "0.5.0", + "id": "bar[..]", + "readme": null, + "repository": null, + "keywords": [], + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "doctest": true, + "edition": "2015", + "name": "bar", + "src_path": "[..]bar/src/lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]bar/Cargo.toml", + "metadata": null + }, + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + 
"name": "baz", + "readme": null, + "repository": null, + "version": "0.5.0", + "id": "baz[..]", + "keywords": [], + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "doctest": true, + "edition": "2015", + "name": "baz", + "src_path": "[..]baz/src/lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]baz/Cargo.toml", + "metadata": null + } + ], + "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "baz 0.5.0 (path+file:[..]baz)" + }, + { + "dependencies": [], + "deps": [], + "features": [], + "id": "bar 0.5.0 (path+file:[..]bar)" + } + ], + "root": null + }, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn workspace_metadata_no_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/lib.rs", "") + .build(); + + p.cargo("metadata --no-deps") + .with_json( + r#" + { + "packages": [ + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + "name": "bar", + "readme": null, + "repository": null, + "version": "0.5.0", + "id": "bar[..]", + "keywords": [], + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "doctest": true, + "edition": "2015", + "name": "bar", + "src_path": "[..]bar/src/lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]bar/Cargo.toml", + "metadata": null + }, + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + "name": "baz", + "readme": null, + "repository": null, + "version": "0.5.0", + "id": "baz[..]", + "keywords": [], + "source": null, + "dependencies": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "targets": [ + { + "kind": [ "lib" ], + "crate_types": ["lib"], + "doctest": true, + "edition": "2015", + "name": "baz", + "src_path": "[..]baz/src/lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]baz/Cargo.toml", + "metadata": null + } + ], + "workspace_members": ["baz 0.5.0 (path+file:[..]baz)", "bar 0.5.0 (path+file:[..]bar)"], + "resolve": null, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_with_invalid_manifest() { + let p = project().file("Cargo.toml", "").build(); + + p.cargo("metadata --format-version 1") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + virtual manifests must be configured with [workspace]", + ) + .run(); +} + +const MANIFEST_OUTPUT: &str = r#" +{ + "packages": [{ + "authors": [ + "wycats@example.com" + ], + "categories": [], + "name":"foo", + "version":"0.5.0", + "id":"foo[..]0.5.0[..](path+file://[..]/foo)", + "source":null, + "dependencies":[], + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "targets":[{ + "kind":["bin"], + "crate_types":["bin"], + "doctest": false, + "edition": 
"2015", + "name":"foo", + "src_path":"[..]/foo/src/foo.rs" + }], + "features":{}, + "manifest_path":"[..]Cargo.toml", + "metadata": null, + "readme": null, + "repository": null + }], + "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ], + "resolve": null, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" +}"#; + +#[cargo_test] +fn cargo_metadata_no_deps_path_to_cargo_toml_relative() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("metadata --no-deps --manifest-path foo/Cargo.toml") + .cwd(p.root().parent().unwrap()) + .with_json(MANIFEST_OUTPUT) + .run(); +} + +#[cargo_test] +fn cargo_metadata_no_deps_path_to_cargo_toml_absolute() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("metadata --no-deps --manifest-path") + .arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()) + .with_json(MANIFEST_OUTPUT) + .run(); +} + +#[cargo_test] +fn cargo_metadata_no_deps_path_to_cargo_toml_parent_relative() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("metadata --no-deps --manifest-path foo") + .cwd(p.root().parent().unwrap()) + .with_status(101) + .with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_no_deps_path_to_cargo_toml_parent_absolute() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("metadata --no-deps --manifest-path") + .arg(p.root()) + .cwd(p.root().parent().unwrap()) + .with_status(101) + .with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_no_deps_cwd() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("metadata --no-deps") + .with_json(MANIFEST_OUTPUT) + .run(); +} + +#[cargo_test] +fn cargo_metadata_bad_version() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("metadata --no-deps --format-version 2") + .with_status(1) + .with_stderr_contains( + "\ +error: '2' isn't a valid value for '--format-version ' +[possible values: 1] +", + ) + .run(); +} + +#[cargo_test] +fn multiple_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + a = [] + b = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("metadata --features").arg("a b").run(); +} + +#[cargo_test] +fn package_metadata() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = ["wycats@example.com"] + categories = ["database"] + keywords = ["database"] + readme = "README.md" + repository = "https://github.com/rust-lang/cargo" + + [package.metadata.bar] + baz = "quux" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("metadata --no-deps") + .with_json( + r#" + { + "packages": [ + { + "authors": ["wycats@example.com"], + "categories": ["database"], + "name": "foo", + "readme": "README.md", + "repository": 
"https://github.com/rust-lang/cargo", + "version": "0.1.0", + "id": "foo[..]", + "keywords": ["database"], + "source": null, + "dependencies": [], + "edition": "2015", + "license": null, + "license_file": null, + "links": null, + "description": null, + "targets": [ + { + "kind": [ "lib" ], + "crate_types": [ "lib" ], + "doctest": true, + "edition": "2015", + "name": "foo", + "src_path": "[..]foo/src/lib.rs" + } + ], + "features": {}, + "manifest_path": "[..]foo/Cargo.toml", + "metadata": { + "bar": { + "baz": "quux" + } + } + } + ], + "workspace_members": ["foo[..]"], + "resolve": null, + "target_directory": "[..]foo/target", + "version": 1, + "workspace_root": "[..]/foo" + }"#, + ) + .run(); +} + +#[cargo_test] +fn cargo_metadata_path_to_cargo_toml_project() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("package --manifest-path") + .arg(p.root().join("bar/Cargo.toml")) + .cwd(p.root().parent().unwrap()) + .run(); + + p.cargo("metadata --manifest-path") + .arg(p.root().join("target/package/bar-0.5.0/Cargo.toml")) + .with_json( + r#" + { + "packages": [ + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2015", + "features": {}, + "id": "bar 0.5.0 ([..])", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]Cargo.toml", + "metadata": null, + "name": "bar", + "readme": null, + "repository": null, + "source": null, + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "bar", + "src_path": "[..]src/lib.rs" + } + ], + "version": "0.5.0" + } + ], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "bar 0.5.0 ([..])" + } + ], + "root": "bar 0.5.0 (path+file:[..])" + }, + "target_directory": "[..]", + "version": 1, + "workspace_members": [ + "bar 0.5.0 (path+file:[..])" + ], + "workspace_root": "[..]" + } +"#, + ) + .run(); +} + +#[cargo_test] +fn package_edition_2018() { + let p = project() + .file("src/lib.rs", "") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = ["wycats@example.com"] + edition = "2018" + "#, + ) + .build(); + p.cargo("metadata") + .with_json( + r#" + { + "packages": [ + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2018", + "features": {}, + "id": "foo 0.1.0 (path+file:[..])", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]Cargo.toml", + "metadata": null, + "name": "foo", + "readme": null, + "repository": null, + "source": null, + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2018", + "kind": [ + "lib" + ], + "name": "foo", + "src_path": "[..]src/lib.rs" + } + ], + "version": "0.1.0" + } + ], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "foo 0.1.0 (path+file:[..])" + } + ], + "root": "foo 0.1.0 (path+file:[..])" + }, + "target_directory": "[..]", + "version": 1, + "workspace_members": [ + "foo 0.1.0 (path+file:[..])" + ], + "workspace_root": "[..]" + } + "#, + ) + .run(); +} + +#[cargo_test] +fn target_edition_2018() { + let p = project() + .file("src/lib.rs", "") + .file("src/main.rs", "") + .file( + "Cargo.toml", + r#" + [package] 
+ name = "foo" + version = "0.1.0" + authors = ["wycats@example.com"] + edition = "2015" + + [lib] + edition = "2018" + "#, + ) + .build(); + p.cargo("metadata") + .with_json( + r#" + { + "packages": [ + { + "authors": [ + "wycats@example.com" + ], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2015", + "features": {}, + "id": "foo 0.1.0 (path+file:[..])", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]Cargo.toml", + "metadata": null, + "name": "foo", + "readme": null, + "repository": null, + "source": null, + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2018", + "kind": [ + "lib" + ], + "name": "foo", + "src_path": "[..]src/lib.rs" + }, + { + "crate_types": [ + "bin" + ], + "doctest": false, + "edition": "2015", + "kind": [ + "bin" + ], + "name": "foo", + "src_path": "[..]src/main.rs" + } + ], + "version": "0.1.0" + } + ], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "foo 0.1.0 (path+file:[..])" + } + ], + "root": "foo 0.1.0 (path+file:[..])" + }, + "target_directory": "[..]", + "version": 1, + "workspace_members": [ + "foo 0.1.0 (path+file:[..])" + ], + "workspace_root": "[..]" + } + "#, + ) + .run(); +} + +#[cargo_test] +fn rename_dependency() { + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.2.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { version = "0.1.0" } + baz = { version = "0.2.0", package = "bar" } + "#, + ) + .file("src/lib.rs", "extern crate bar; extern crate baz;") + .build(); + + p.cargo("metadata") + .with_json( + r#" +{ + "packages": [ + { + "authors": [], + "categories": [], + "dependencies": [ + { + "features": [], + "kind": null, + "name": "bar", + "optional": false, + "rename": null, + "registry": null, + "req": "^0.1.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "target": null, + "uses_default_features": true + }, + { + "features": [], + "kind": null, + "name": "bar", + "optional": false, + "rename": "baz", + "registry": null, + "req": "^0.2.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "target": null, + "uses_default_features": true + } + ], + "description": null, + "edition": "2015", + "features": {}, + "id": "foo 0.0.1[..]", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]", + "metadata": null, + "name": "foo", + "readme": null, + "repository": null, + "source": null, + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "foo", + "src_path": "[..]" + } + ], + "version": "0.0.1" + }, + { + "authors": [], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2015", + "features": {}, + "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]", + "metadata": null, + "name": "bar", + "readme": null, + "repository": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "bar", + "src_path": "[..]" + } + ], + "version": "0.1.0" + }, + { + "authors": [], + "categories": [], + "dependencies": [], + 
"description": null, + "edition": "2015", + "features": {}, + "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]", + "metadata": null, + "name": "bar", + "readme": null, + "repository": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "bar", + "src_path": "[..]" + } + ], + "version": "0.2.0" + } + ], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" + }, + { + "dependencies": [], + "deps": [], + "features": [], + "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" + }, + { + "dependencies": [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" + ], + "deps": [ + { + "name": "bar", + "pkg": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" + }, + { + "name": "baz", + "pkg": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" + } + ], + "features": [], + "id": "foo 0.0.1[..]" + } + ], + "root": "foo 0.0.1[..]" + }, + "target_directory": "[..]", + "version": 1, + "workspace_members": [ + "foo 0.0.1[..]" + ], + "workspace_root": "[..]" +}"#, + ) + .run(); +} + +#[cargo_test] +fn metadata_links() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + links = "a" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .build(); + + p.cargo("metadata") + .with_json( + r#" +{ + "packages": [ + { + "authors": [], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2015", + "features": {}, + "id": "foo 0.5.0 [..]", + "keywords": [], + "license": null, + "license_file": null, + "links": "a", + "manifest_path": "[..]/foo/Cargo.toml", + "metadata": null, + "name": "foo", + "readme": null, + "repository": null, + "source": null, + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "foo", + "src_path": "[..]/foo/src/lib.rs" + }, + { + "crate_types": [ + "bin" + ], + "doctest": false, + "edition": "2015", + "kind": [ + "custom-build" + ], + "name": "build-script-build", + "src_path": "[..]/foo/build.rs" + } + ], + "version": "0.5.0" + } + ], + "resolve": { + "nodes": [ + { + "dependencies": [], + "deps": [], + "features": [], + "id": "foo 0.5.0 [..]" + } + ], + "root": "foo 0.5.0 [..]" + }, + "target_directory": "[..]/foo/target", + "version": 1, + "workspace_members": [ + "foo 0.5.0 [..]" + ], + "workspace_root": "[..]/foo" +} +"#, + ) + .run() +} + +#[cargo_test] +fn deps_with_bin_only() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [dependencies] + bdep = { path = "bdep" } + "#, + ) + .file("src/lib.rs", "") + .file("bdep/Cargo.toml", &basic_bin_manifest("bdep")) + .file("bdep/src/main.rs", "fn main() {}") + .build(); + + let output = p + .cargo("metadata") + .exec_with_output() + .expect("cargo metadata failed"); + let stdout = std::str::from_utf8(&output.stdout).unwrap(); + let meta: serde_json::Value = serde_json::from_str(stdout).expect("failed to parse json"); + let nodes = &meta["resolve"]["nodes"]; + 
assert!(nodes[0]["deps"].as_array().unwrap().is_empty()); + assert!(nodes[1]["deps"].as_array().unwrap().is_empty()); +} diff --git a/tests/testsuite/net_config.rs b/tests/testsuite/net_config.rs new file mode 100644 index 00000000000..7d8456ca776 --- /dev/null +++ b/tests/testsuite/net_config.rs @@ -0,0 +1,72 @@ +use crate::support::project; + +#[cargo_test] +fn net_retry_loads_from_config() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "http://127.0.0.1:11/foo/bar" + "#, + ) + .file("src/main.rs", "") + .file( + ".cargo/config", + r#" + [net] + retry=1 + [http] + timeout=1 + "#, + ) + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr_contains( + "[WARNING] spurious network error \ + (1 tries remaining): [..]", + ) + .run(); +} + +#[cargo_test] +fn net_retry_git_outputs_warning() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + git = "http://127.0.0.1:11/foo/bar" + "#, + ) + .file( + ".cargo/config", + r#" + [http] + timeout=1 + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build -v -j 1") + .with_status(101) + .with_stderr_contains( + "[WARNING] spurious network error \ + (2 tries remaining): [..]", + ) + .with_stderr_contains("[WARNING] spurious network error (1 tries remaining): [..]") + .run(); +} diff --git a/tests/testsuite/new.rs b/tests/testsuite/new.rs new file mode 100644 index 00000000000..ee6408026e4 --- /dev/null +++ b/tests/testsuite/new.rs @@ -0,0 +1,529 @@ +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; + +use crate::support::paths; +use crate::support::{cargo_process, git_process}; + +fn create_empty_gitconfig() { + // This helps on Windows where libgit2 is very aggressive in attempting to + // find a git config file. 
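+ // An empty `~/.gitconfig` also keeps the author-detection tests below
+ // deterministic, since the developer's real `user.name`/`user.email`
+ // cannot leak into the generated manifests.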
+ let gitconfig = paths::home().join(".gitconfig");
+ File::create(gitconfig).unwrap();
+}
+
+#[cargo_test]
+fn simple_lib() {
+ cargo_process("new --lib foo --vcs none --edition 2015")
+ .env("USER", "foo")
+ .with_stderr("[CREATED] library `foo` package")
+ .run();
+
+ assert!(paths::root().join("foo").is_dir());
+ assert!(paths::root().join("foo/Cargo.toml").is_file());
+ assert!(paths::root().join("foo/src/lib.rs").is_file());
+ assert!(!paths::root().join("foo/.gitignore").is_file());
+
+ let lib = paths::root().join("foo/src/lib.rs");
+ let mut contents = String::new();
+ File::open(&lib)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert_eq!(
+ contents,
+ r#"#[cfg(test)]
+mod tests {
+ #[test]
+ fn it_works() {
+ assert_eq!(2 + 2, 4);
+ }
+}
+"#
+ );
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+}
+
+#[cargo_test]
+fn simple_bin() {
+ cargo_process("new --bin foo --edition 2015")
+ .env("USER", "foo")
+ .with_stderr("[CREATED] binary (application) `foo` package")
+ .run();
+
+ assert!(paths::root().join("foo").is_dir());
+ assert!(paths::root().join("foo/Cargo.toml").is_file());
+ assert!(paths::root().join("foo/src/main.rs").is_file());
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+ assert!(paths::root()
+ .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX))
+ .is_file());
+}
+
+#[cargo_test]
+fn both_lib_and_bin() {
+ cargo_process("new --lib --bin foo")
+ .env("USER", "foo")
+ .with_status(101)
+ .with_stderr("[ERROR] can't specify both lib and binary outputs")
+ .run();
+}
+
+#[cargo_test]
+fn simple_git() {
+ cargo_process("new --lib foo --edition 2015")
+ .env("USER", "foo")
+ .run();
+
+ assert!(paths::root().is_dir());
+ assert!(paths::root().join("foo/Cargo.toml").is_file());
+ assert!(paths::root().join("foo/src/lib.rs").is_file());
+ assert!(paths::root().join("foo/.git").is_dir());
+ assert!(paths::root().join("foo/.gitignore").is_file());
+
+ let fp = paths::root().join("foo/.gitignore");
+ let mut contents = String::new();
+ File::open(&fp)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert_eq!(contents, "/target\n**/*.rs.bk\nCargo.lock\n",);
+
+ cargo_process("build").cwd(&paths::root().join("foo")).run();
+}
+
+#[cargo_test]
+fn no_argument() {
+ cargo_process("new")
+ .with_status(1)
+ .with_stderr_contains(
+ "\
+error: The following required arguments were not provided:
+    <path>
+",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn existing() {
+ let dst = paths::root().join("foo");
+ fs::create_dir(&dst).unwrap();
+ cargo_process("new foo")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] destination `[CWD]/foo` already exists\n\n\
+ Use `cargo init` to initialize the directory",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn invalid_characters() {
+ cargo_process("new foo.rs")
+ .with_status(101)
+ .with_stderr(
+ "\
+[ERROR] Invalid character `.` in crate name: `foo.rs`
+use --name to override crate name",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reserved_name() {
+ cargo_process("new test")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] The name `test` cannot be used as a crate name\n\
+ use --name to override crate name",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn reserved_binary_name() {
+ cargo_process("new --bin incremental")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] The name `incremental` cannot be used as a crate name\n\
+ use --name to override crate name",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn keyword_name() {
+ cargo_process("new pub")
+ .with_status(101)
+ .with_stderr(
+ "[ERROR] The name `pub` cannot be used as a crate name\n\
+ use --name to override crate name",
+ )
+ .run();
+}
+
+#[cargo_test]
+fn finds_author_user() {
+ create_empty_gitconfig();
+ cargo_process("new foo").env("USER", "foo").run();
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[cargo_test]
+fn finds_author_user_escaped() {
+ create_empty_gitconfig();
+ cargo_process("new foo").env("USER", "foo \"bar\"").run();
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo \"bar\""]"#));
+}
+
+#[cargo_test]
+fn finds_author_username() {
+ create_empty_gitconfig();
+ cargo_process("new foo")
+ .env_remove("USER")
+ .env("USERNAME", "foo")
+ .run();
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo"]"#));
+}
+
+#[cargo_test]
+fn finds_author_priority() {
+ cargo_process("new foo")
+ .env("USER", "bar2")
+ .env("EMAIL", "baz2")
+ .env("CARGO_NAME", "bar")
+ .env("CARGO_EMAIL", "baz")
+ .run();
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[cargo_test]
+fn finds_author_email() {
+ create_empty_gitconfig();
+ cargo_process("new foo")
+ .env("USER", "bar")
+ .env("EMAIL", "baz")
+ .run();
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[cargo_test]
+fn finds_author_git() {
+ git_process("config --global user.name bar").exec().unwrap();
+ git_process("config --global user.email baz")
+ .exec()
+ .unwrap();
+ cargo_process("new foo").env("USER", "foo").run();
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[cargo_test]
+fn finds_local_author_git() {
+ git_process("init").exec().unwrap();
+ git_process("config --global user.name foo").exec().unwrap();
+ git_process("config --global user.email foo@bar")
+ .exec()
+ .unwrap();
+
+ // Set local git user config
+ git_process("config user.name bar").exec().unwrap();
+ git_process("config user.email baz").exec().unwrap();
+ cargo_process("init").env("USER", "foo").run();
+
+ let toml = paths::root().join("Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["bar <baz>"]"#));
+}
+
+#[cargo_test]
+fn finds_git_email() {
+ cargo_process("new foo")
+ .env("GIT_AUTHOR_NAME", "foo")
+ .env("GIT_AUTHOR_EMAIL", "gitfoo")
+ .run();
+
+ let toml = paths::root().join("foo/Cargo.toml");
+ let mut contents = String::new();
+ File::open(&toml)
+ .unwrap()
+ .read_to_string(&mut contents)
+ .unwrap();
+ assert!(contents.contains(r#"authors = ["foo <gitfoo>"]"#), contents);
+}
+
+#[cargo_test]
+fn finds_git_author() {
+ create_empty_gitconfig();
+ cargo_process("new 
foo") + .env_remove("USER") + .env("GIT_COMMITTER_NAME", "gitfoo") + .run(); + + let toml = paths::root().join("foo/Cargo.toml"); + let mut contents = String::new(); + File::open(&toml) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"authors = ["gitfoo"]"#)); +} + +#[cargo_test] +fn author_prefers_cargo() { + git_process("config --global user.name foo").exec().unwrap(); + git_process("config --global user.email bar") + .exec() + .unwrap(); + let root = paths::root(); + fs::create_dir(&root.join(".cargo")).unwrap(); + File::create(&root.join(".cargo/config")) + .unwrap() + .write_all( + br#" + [cargo-new] + name = "new-foo" + email = "new-bar" + vcs = "none" + "#, + ) + .unwrap(); + + cargo_process("new foo").env("USER", "foo").run(); + + let toml = paths::root().join("foo/Cargo.toml"); + let mut contents = String::new(); + File::open(&toml) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"authors = ["new-foo "]"#)); + assert!(!root.join("foo/.gitignore").exists()); +} + +#[cargo_test] +fn strip_angle_bracket_author_email() { + create_empty_gitconfig(); + cargo_process("new foo") + .env("USER", "bar") + .env("EMAIL", "") + .run(); + + let toml = paths::root().join("foo/Cargo.toml"); + let mut contents = String::new(); + File::open(&toml) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + assert!(contents.contains(r#"authors = ["bar "]"#)); +} + +#[cargo_test] +fn git_prefers_command_line() { + let root = paths::root(); + fs::create_dir(&root.join(".cargo")).unwrap(); + File::create(&root.join(".cargo/config")) + .unwrap() + .write_all( + br#" + [cargo-new] + vcs = "none" + name = "foo" + email = "bar" + "#, + ) + .unwrap(); + + cargo_process("new foo --vcs git").env("USER", "foo").run(); + assert!(paths::root().join("foo/.gitignore").exists()); +} + +#[cargo_test] +fn subpackage_no_git() { + cargo_process("new foo").env("USER", "foo").run(); + + assert!(paths::root().join("foo/.git").is_dir()); + assert!(paths::root().join("foo/.gitignore").is_file()); + + let subpackage = paths::root().join("foo").join("components"); + fs::create_dir(&subpackage).unwrap(); + cargo_process("new foo/components/subcomponent") + .env("USER", "foo") + .run(); + + assert!(!paths::root() + .join("foo/components/subcomponent/.git") + .is_file()); + assert!(!paths::root() + .join("foo/components/subcomponent/.gitignore") + .is_file()); +} + +#[cargo_test] +fn subpackage_git_with_gitignore() { + cargo_process("new foo").env("USER", "foo").run(); + + assert!(paths::root().join("foo/.git").is_dir()); + assert!(paths::root().join("foo/.gitignore").is_file()); + + let gitignore = paths::root().join("foo/.gitignore"); + fs::write(gitignore, b"components").unwrap(); + + let subpackage = paths::root().join("foo/components"); + fs::create_dir(&subpackage).unwrap(); + cargo_process("new foo/components/subcomponent") + .env("USER", "foo") + .run(); + + assert!(paths::root() + .join("foo/components/subcomponent/.git") + .is_dir()); + assert!(paths::root() + .join("foo/components/subcomponent/.gitignore") + .is_file()); +} + +#[cargo_test] +fn subpackage_git_with_vcs_arg() { + cargo_process("new foo").env("USER", "foo").run(); + + let subpackage = paths::root().join("foo").join("components"); + fs::create_dir(&subpackage).unwrap(); + cargo_process("new foo/components/subcomponent --vcs git") + .env("USER", "foo") + .run(); + + assert!(paths::root() + .join("foo/components/subcomponent/.git") + .is_dir()); + assert!(paths::root() + 
.join("foo/components/subcomponent/.gitignore") + .is_file()); +} + +#[cargo_test] +fn unknown_flags() { + cargo_process("new foo --flag") + .with_status(1) + .with_stderr_contains( + "error: Found argument '--flag' which wasn't expected, or isn't valid in this context", + ) + .run(); +} + +#[cargo_test] +fn explicit_invalid_name_not_suggested() { + cargo_process("new --name 10-invalid a") + .with_status(101) + .with_stderr("[ERROR] Package names starting with a digit cannot be used as a crate name") + .run(); +} + +#[cargo_test] +fn explicit_project_name() { + cargo_process("new --lib foo --name bar") + .env("USER", "foo") + .with_stderr("[CREATED] library `bar` package") + .run(); +} + +#[cargo_test] +fn new_with_edition_2015() { + cargo_process("new --edition 2015 foo") + .env("USER", "foo") + .run(); + let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); + assert!(manifest.contains("edition = \"2015\"")); +} + +#[cargo_test] +fn new_with_edition_2018() { + cargo_process("new --edition 2018 foo") + .env("USER", "foo") + .run(); + let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); + assert!(manifest.contains("edition = \"2018\"")); +} + +#[cargo_test] +fn new_default_edition() { + cargo_process("new foo").env("USER", "foo").run(); + let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); + assert!(manifest.contains("edition = \"2018\"")); +} + +#[cargo_test] +fn new_with_bad_edition() { + cargo_process("new --edition something_else foo") + .env("USER", "foo") + .with_stderr_contains("error: 'something_else' isn't a valid value[..]") + .with_status(1) + .run(); +} + +#[cargo_test] +fn new_with_blank_email() { + cargo_process("new foo") + .env("CARGO_NAME", "Sen") + .env("CARGO_EMAIL", "") + .run(); + + let contents = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); + assert!(contents.contains(r#"authors = ["Sen"]"#), contents); +} + +#[cargo_test] +fn new_with_reference_link() { + cargo_process("new foo").env("USER", "foo").run(); + + let contents = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); + assert!(contents.contains("# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html")) +} diff --git a/tests/testsuite/offline.rs b/tests/testsuite/offline.rs new file mode 100644 index 00000000000..b466e896f68 --- /dev/null +++ b/tests/testsuite/offline.rs @@ -0,0 +1,537 @@ +use crate::support::{basic_manifest, git, main_file, path2url, project, registry::Package}; +use std::fs; + +#[cargo_test] +fn offline_unused_target_dep() { + // --offline with a target dependency that is not used and not downloaded. + Package::new("unused_dep", "1.0.0").publish(); + Package::new("used_dep", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + [dependencies] + used_dep = "1.0" + [target.'cfg(unused)'.dependencies] + unused_dep = "1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + // Do a build that downloads only what is necessary. + p.cargo("build") + .with_stderr_contains("[DOWNLOADED] used_dep [..]") + .with_stderr_does_not_contain("[DOWNLOADED] unused_dep [..]") + .run(); + p.cargo("clean").run(); + // Build offline, make sure it works. 
+ p.cargo("build --offline").run(); +} + +#[cargo_test] +fn offline_missing_optional() { + Package::new("opt_dep", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + [dependencies] + opt_dep = { version = "1.0", optional = true } + "#, + ) + .file("src/lib.rs", "") + .build(); + // Do a build that downloads only what is necessary. + p.cargo("build") + .with_stderr_does_not_contain("[DOWNLOADED] opt_dep [..]") + .run(); + p.cargo("clean").run(); + // Build offline, make sure it works. + p.cargo("build --offline").run(); + p.cargo("build --offline --features=opt_dep") + .with_stderr( + "\ +[ERROR] failed to download `opt_dep v1.0.0` + +Caused by: + can't make HTTP request in the offline mode +", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn cargo_compile_path_with_offline() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build --offline").run(); +} + +#[cargo_test] +fn cargo_compile_with_downloaded_dependency_with_offline() { + Package::new("present_dep", "1.2.3") + .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3")) + .file("src/lib.rs", "") + .publish(); + + // make package downloaded + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "1.2.3" + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build").run(); + + let p2 = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + + [dependencies] + present_dep = "1.2.3" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p2.cargo("build --offline") + .with_stderr( + "\ +[COMPILING] present_dep v1.2.3 +[COMPILING] bar v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_offline_not_try_update() { + let p = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + + [dependencies] + not_cached_dep = "1.2.5" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build --offline") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to load source for a dependency on `not_cached_dep` + +Caused by: + Unable to update registry `https://github.com/rust-lang/crates.io-index` + +Caused by: + unable to fetch registry `https://github.com/rust-lang/crates.io-index` in offline mode +Try running without the offline flag, or try running `cargo fetch` within your \ +project directory before going offline. 
+", + ) + .run(); + + p.change_file(".cargo/config", "net.offline = true"); + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]Unable to update registry[..]") + .run(); +} + +#[cargo_test] +fn compile_offline_without_maxvers_cached() { + Package::new("present_dep", "1.2.1").publish(); + Package::new("present_dep", "1.2.2").publish(); + + Package::new("present_dep", "1.2.3") + .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3")) + .file( + "src/lib.rs", + r#"pub fn get_version()->&'static str {"1.2.3"}"#, + ) + .publish(); + + Package::new("present_dep", "1.2.5") + .file("Cargo.toml", &basic_manifest("present_dep", "1.2.5")) + .file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#) + .publish(); + + // make package cached + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "=1.2.3" + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build").run(); + + let p2 = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + present_dep = "1.2" + "#, + ) + .file( + "src/main.rs", + "\ +extern crate present_dep; +fn main(){ + println!(\"{}\", present_dep::get_version()); +}", + ) + .build(); + + p2.cargo("run --offline") + .with_stderr( + "\ +[COMPILING] present_dep v1.2.3 +[COMPILING] foo v0.1.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] + Running `[..]`", + ) + .with_stdout("1.2.3") + .run(); +} + +#[cargo_test] +fn cargo_compile_forbird_git_httpsrepo_offline() { + let p = project() + .file( + "Cargo.toml", + r#" + + [project] + name = "foo" + version = "0.5.0" + authors = ["chabapok@example.com"] + + [dependencies.dep1] + git = 'https://github.com/some_user/dep1.git' + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("build --offline").with_status(101).with_stderr("\ +error: failed to load source for a dependency on `dep1` + +Caused by: + Unable to update https://github.com/some_user/dep1.git + +Caused by: + can't checkout from 'https://github.com/some_user/dep1.git': you are in the offline mode (--offline)").run(); +} + +#[cargo_test] +fn compile_offline_while_transitive_dep_not_cached() { + let baz = Package::new("baz", "1.0.0"); + let baz_path = baz.archive_dst(); + baz.publish(); + + let baz_content = fs::read(&baz_path).unwrap(); + // Truncate the file to simulate a download failure. + fs::write(&baz_path, &[]).unwrap(); + + Package::new("bar", "0.1.0").dep("baz", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main(){}") + .build(); + + // simulate download bar, but fail to download baz + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]failed to verify the checksum of `baz[..]") + .run(); + + // Restore the file contents. 
+ fs::write(&baz_path, &baz_content).unwrap(); + + p.cargo("build --offline") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to download `baz v1.0.0` + +Caused by: + can't make HTTP request in the offline mode +", + ) + .run(); +} + +#[cargo_test] +fn update_offline() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("update --offline") + .with_status(101) + .with_stderr("error: you can't update in the offline mode[..]") + .run(); +} + +#[cargo_test] +fn cargo_compile_offline_with_cached_git_dep() { + let git_project = git::new("dep1", |project| { + project + .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) + .file( + "src/lib.rs", + r#" + pub static COOL_STR:&str = "cached git repo rev1"; + "#, + ) + }) + .unwrap(); + + let repo = git2::Repository::open(&git_project.root()).unwrap(); + let rev1 = repo.revparse_single("HEAD").unwrap().id(); + + // Commit the changes and make sure we trigger a recompile + git_project.change_file( + "src/lib.rs", + r#"pub static COOL_STR:&str = "cached git repo rev2";"#, + ); + git::add(&repo); + let rev2 = git::commit(&repo); + + // cache to registry rev1 and rev2 + let prj = project() + .at("cache_git_dep") + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "cache_git_dep" + version = "0.5.0" + + [dependencies.dep1] + git = '{}' + rev = "{}" + "#, + git_project.url(), + rev1 + ), + ) + .file("src/main.rs", "fn main(){}") + .build(); + prj.cargo("build").run(); + + prj.change_file( + "Cargo.toml", + &format!( + r#" + [project] + name = "cache_git_dep" + version = "0.5.0" + + [dependencies.dep1] + git = '{}' + rev = "{}" + "#, + git_project.url(), + rev2 + ), + ); + prj.cargo("build").run(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + + [dependencies.dep1] + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/main.rs", + &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]), + ) + .build(); + + let git_root = git_project.root(); + + p.cargo("build --offline") + .with_stderr(format!( + "\ +[COMPILING] dep1 v0.5.0 ({}#[..]) +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + path2url(git_root), + )) + .run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")) + .with_stdout("hello from cached git repo rev2\n") + .run(); + + p.change_file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.5.0" + + [dependencies.dep1] + git = '{}' + rev = "{}" + "#, + git_project.url(), + rev1 + ), + ); + + p.cargo("build --offline").run(); + p.process(&p.bin("foo")) + .with_stdout("hello from cached git repo rev1\n") + .run(); +} + +#[cargo_test] +fn offline_resolve_optional_fail() { + // Example where resolve fails offline. + // + // This happens if at least 1 version of an optional dependency is + // available, but none of them satisfy the requirements. The current logic + // that handles this is `RegistryIndex::query_inner`, and it doesn't know + // if the package being queried is an optional one. This is not ideal, it + // would be best if it just ignored optional (unselected) dependencies. 
+ Package::new("dep", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + dep = { version = "1.0", optional = true } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("fetch").run(); + + // Change dep to 2.0. + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + dep = { version = "2.0", optional = true } + "#, + ); + + p.cargo("build --offline") + .with_status(101) + .with_stderr("\ +[ERROR] failed to select a version for the requirement `dep = \"^2.0\"` + candidate versions found which didn't match: 1.0.0 + location searched: `[..]` index (which is replacing registry `https://github.com/rust-lang/crates.io-index`) +required by package `foo v0.1.0 ([..]/foo)` +perhaps a crate was updated and forgotten to be re-vendored? +As a reminder, you're using offline mode (--offline) which can sometimes cause \ +surprising resolution failures, if this error is too confusing you may wish to \ +retry without the offline flag. +") + .run(); +} diff --git a/tests/testsuite/out_dir.rs b/tests/testsuite/out_dir.rs new file mode 100644 index 00000000000..be83a3a48e6 --- /dev/null +++ b/tests/testsuite/out_dir.rs @@ -0,0 +1,275 @@ +use std::env; +use std::fs::{self, File}; +use std::path::Path; + +use crate::support::sleep_ms; +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn binary_with_debug() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) + .build(); + + p.cargo("build -Z unstable-options --out-dir out") + .masquerade_as_nightly_cargo() + .run(); + check_dir_contents( + &p.root().join("out"), + &["foo"], + &["foo", "foo.dSYM"], + &["foo.exe", "foo.pdb"], + ); +} + +#[cargo_test] +fn static_library_with_debug() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + crate-type = ["staticlib"] + "#, + ) + .file( + "src/lib.rs", + r#" + #[no_mangle] + pub extern "C" fn foo() { println!("Hello, World!") } + "#, + ) + .build(); + + p.cargo("build -Z unstable-options --out-dir out") + .masquerade_as_nightly_cargo() + .run(); + check_dir_contents( + &p.root().join("out"), + &["libfoo.a"], + &["libfoo.a"], + &["foo.lib"], + ); +} + +#[cargo_test] +fn dynamic_library_with_debug() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + crate-type = ["cdylib"] + "#, + ) + .file( + "src/lib.rs", + r#" + #[no_mangle] + pub extern "C" fn foo() { println!("Hello, World!") } + "#, + ) + .build(); + + p.cargo("build -Z unstable-options --out-dir out") + .masquerade_as_nightly_cargo() + .run(); + check_dir_contents( + &p.root().join("out"), + &["libfoo.so"], + &["libfoo.dylib"], + &["foo.dll", "foo.dll.lib"], + ); +} + +#[cargo_test] +fn rlib_with_debug() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + crate-type = ["rlib"] + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn foo() { println!("Hello, World!") } + "#, + ) + .build(); + + p.cargo("build -Z unstable-options --out-dir out") + .masquerade_as_nightly_cargo() + .run(); + check_dir_contents( + &p.root().join("out"), + &["libfoo.rlib"], + &["libfoo.rlib"], + &["libfoo.rlib"], + ); +} + +#[cargo_test] +fn include_only_the_binary_from_the_current_package() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + 
version = "0.0.1" + authors = [] + + [workspace] + + [dependencies] + utils = { path = "./utils" } + "#, + ) + .file("src/lib.rs", "extern crate utils;") + .file( + "src/main.rs", + r#" + extern crate foo; + extern crate utils; + fn main() { + println!("Hello, World!") + } + "#, + ) + .file("utils/Cargo.toml", &basic_manifest("utils", "0.0.1")) + .file("utils/src/lib.rs", "") + .build(); + + p.cargo("build -Z unstable-options --bin foo --out-dir out") + .masquerade_as_nightly_cargo() + .run(); + check_dir_contents( + &p.root().join("out"), + &["foo"], + &["foo", "foo.dSYM"], + &["foo.exe", "foo.pdb"], + ); +} + +#[cargo_test] +fn out_dir_is_a_file() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) + .build(); + File::create(p.root().join("out")).unwrap(); + + p.cargo("build -Z unstable-options --out-dir out") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains("[ERROR] failed to link or copy [..]") + .run(); +} + +#[cargo_test] +fn replaces_artifacts() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("foo") }"#) + .build(); + + p.cargo("build -Z unstable-options --out-dir out") + .masquerade_as_nightly_cargo() + .run(); + p.process( + &p.root() + .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)), + ) + .with_stdout("foo") + .run(); + + sleep_ms(1000); + p.change_file("src/main.rs", r#"fn main() { println!("bar") }"#); + + p.cargo("build -Z unstable-options --out-dir out") + .masquerade_as_nightly_cargo() + .run(); + p.process( + &p.root() + .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)), + ) + .with_stdout("bar") + .run(); +} + +#[cargo_test] +fn avoid_build_scripts() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/main.rs", "fn main() {}") + .file("a/build.rs", r#"fn main() { println!("hello-build-a"); }"#) + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/main.rs", "fn main() {}") + .file("b/build.rs", r#"fn main() { println!("hello-build-b"); }"#) + .build(); + + p.cargo("build -Z unstable-options --out-dir out -vv") + .masquerade_as_nightly_cargo() + .with_stdout_contains("[a 0.0.1] hello-build-a") + .with_stdout_contains("[b 0.0.1] hello-build-b") + .run(); + check_dir_contents( + &p.root().join("out"), + &["a", "b"], + &["a", "a.dSYM", "b", "b.dSYM"], + &["a.exe", "a.pdb", "b.exe", "b.pdb"], + ); +} + +fn check_dir_contents( + out_dir: &Path, + expected_linux: &[&str], + expected_mac: &[&str], + expected_win: &[&str], +) { + let expected = if cfg!(target_os = "windows") { + expected_win + } else if cfg!(target_os = "macos") { + expected_mac + } else { + expected_linux + }; + + let actual = list_dir(out_dir); + let mut expected = expected.iter().map(|s| s.to_string()).collect::>(); + expected.sort_unstable(); + assert_eq!(actual, expected); +} + +fn list_dir(dir: &Path) -> Vec { + let mut res = Vec::new(); + for entry in fs::read_dir(dir).unwrap() { + let entry = entry.unwrap(); + res.push(entry.file_name().into_string().unwrap()); + } + res.sort_unstable(); + res +} diff --git a/tests/testsuite/overrides.rs b/tests/testsuite/overrides.rs new file mode 100644 index 00000000000..1a9dd1c592f --- /dev/null +++ b/tests/testsuite/overrides.rs @@ -0,0 +1,1412 @@ +use crate::support::git; +use crate::support::paths; +use crate::support::registry::Package; +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn override_simple() { + 
Package::new("bar", "0.1.0").publish(); + + let bar = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + bar.url() + ), + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[UPDATING] git repository `[..]` +[COMPILING] bar v0.1.0 (file://[..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn missing_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + bar = { git = 'https://example.com' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements must specify a version to replace, but `[..]bar` does not +", + ) + .run(); +} + +#[cargo_test] +fn invalid_semver_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + + [replace] + "bar:*" = { git = 'https://example.com' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements must specify a valid semver version to replace, but `bar:*` does not +", + ) + .run(); +} + +#[cargo_test] +fn different_version() { + Package::new("bar", "0.2.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = "0.2.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for [..] +", + ) + .run(); +} + +#[cargo_test] +fn transitive() { + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.2.0") + .dep("bar", "0.1.0") + .file("src/lib.rs", "extern crate bar; fn baz() { bar::bar(); }") + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + baz = "0.2.0" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[UPDATING] git repository `[..]` +[DOWNLOADING] crates ... +[DOWNLOADED] baz v0.2.0 (registry [..]) +[COMPILING] bar v0.1.0 (file://[..]) +[COMPILING] baz v0.2.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn persists_across_rebuilds() { + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[UPDATING] git repository `file://[..]` +[COMPILING] bar v0.1.0 (file://[..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn replace_registry_with_path() { + Package::new("bar", "0.1.0").publish(); + + let _ = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = { path = "../bar" } + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[COMPILING] bar v0.1.0 ([ROOT][..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn use_a_spec_to_select() { + Package::new("baz", "0.1.1") + .file("src/lib.rs", "pub fn baz1() {}") + .publish(); + Package::new("baz", "0.2.0").publish(); + Package::new("bar", "0.1.1") + .dep("baz", "0.2") + .file( + "src/lib.rs", + "extern crate baz; pub fn bar() { baz::baz3(); }", + ) + .publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("baz", "0.2.0")) + .file("src/lib.rs", "pub fn baz3() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + baz = "0.1" + + [replace] + "baz:0.2.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file( + "src/lib.rs", + " + extern crate bar; + extern crate baz; + + pub fn local() { + baz::baz1(); + bar::bar(); + } + ", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[UPDATING] git repository `[..]` +[DOWNLOADING] crates ... +[DOWNLOADED] [..] +[DOWNLOADED] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn override_adds_some_deps() { + Package::new("baz", "0.1.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[UPDATING] git repository `[..]` +[DOWNLOADING] crates ... +[DOWNLOADED] baz v0.1.1 (registry [..]) +[COMPILING] baz v0.1.1 +[COMPILING] bar v0.1.0 ([..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build").with_stdout("").run(); + + Package::new("baz", "0.1.2").publish(); + p.cargo("update -p") + .arg(&format!("{}#bar", foo.url())) + .with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[UPDATING] `[ROOT][..]` index +", + ) + .run(); + p.cargo("update -p https://github.com/rust-lang/crates.io-index#bar") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +", + ) + .run(); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn locked_means_locked_yes_no_seriously_i_mean_locked() { + // this in theory exercises #2041 + Package::new("baz", "0.1.0").publish(); + Package::new("baz", "0.2.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + baz = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + p.cargo("build").with_stdout("").run(); + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn override_wrong_name() { + Package::new("baz", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + baz = "0.1" + + [replace] + "baz:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +[UPDATING] git repository [..] +error: no matching package for override `[..]baz:0.1.0` found +location searched: file://[..] 
+version required: = 0.1.0 +", + ) + .run(); +} + +#[cargo_test] +fn override_with_nothing() { + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("src/lib.rs", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + "bar:0.1.0" = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +[UPDATING] git repository [..] +[ERROR] failed to load source for a dependency on `bar` + +Caused by: + Unable to update file://[..] + +Caused by: + Could not find Cargo.toml in `[..]` +", + ) + .run(); +} + +#[cargo_test] +fn override_wrong_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [replace] + "bar:0.1.0" = { git = 'https://example.com', version = '0.2.0' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + replacements cannot specify a version requirement, but found one for `[..]bar:0.1.0` +", + ) + .run(); +} + +#[cargo_test] +fn multiple_specs() { + Package::new("bar", "0.1.0").publish(); + + let bar = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = {{ git = '{0}' }} + + [replace."https://github.com/rust-lang/crates.io-index#bar:0.1.0"] + git = '{0}' + "#, + bar.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +[UPDATING] git repository [..] +error: overlapping replacement specifications found: + + * [..] + * [..] + +both specifications match: bar v0.1.0 +", + ) + .run(); +} + +#[cargo_test] +fn test_override_dep() { + Package::new("bar", "0.1.0").publish(); + + let bar = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = {{ git = '{0}' }} + "#, + bar.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("test -p bar") + .with_status(101) + .with_stderr_contains( + "\ +error: There are multiple `bar` packages in your project, and the [..] +Please re-run this command with [..] 
+ [..]#bar:0.1.0 + [..]#bar:0.1.0 +", + ) + .run(); +} + +#[cargo_test] +fn update() { + Package::new("bar", "0.1.0").publish(); + + let bar = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = {{ git = '{0}' }} + "#, + bar.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("generate-lockfile").run(); + p.cargo("update") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] git repository `[..]` +", + ) + .run(); +} + +// foo -> near -> far +// near is overridden with itself +#[cargo_test] +fn no_override_self() { + let deps = git::repo(&paths::root().join("override")) + .file("far/Cargo.toml", &basic_manifest("far", "0.1.0")) + .file("far/src/lib.rs", "") + .file( + "near/Cargo.toml", + r#" + [package] + name = "near" + version = "0.1.0" + authors = [] + + [dependencies] + far = { path = "../far" } + "#, + ) + .file("near/src/lib.rs", "#![no_std] pub extern crate far;") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + near = {{ git = '{0}' }} + + [replace] + "near:0.1.0" = {{ git = '{0}' }} + "#, + deps.url() + ), + ) + .file("src/lib.rs", "#![no_std] pub extern crate near;") + .build(); + + p.cargo("build --verbose").run(); +} + +#[cargo_test] +fn broken_path_override_warns() { + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.2.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a1" } + "#, + ) + .file("src/lib.rs", "") + .file( + "a1/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + "#, + ) + .file("a1/src/lib.rs", "") + .file( + "a2/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.2" + "#, + ) + .file("a2/src/lib.rs", "") + .file(".cargo/config", r#"paths = ["a2"]"#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] [..] +warning: path override for crate `a` has altered the original list of +dependencies; the dependency on `bar` was either added or +modified to not match the previously resolved version + +This is currently allowed but is known to produce buggy behavior with spurious +recompiles and changes to the crate graph. Path overrides unfortunately were +never intended to support this feature, so for now this message is just a +warning. In the future, however, this message will become a hard error. + +To change the dependency graph via an override it's recommended to use the +`[replace]` feature of Cargo instead of the path override feature. This is +documented online at the url below for more information. + +https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#overriding-dependencies + +[DOWNLOADING] crates ... +[DOWNLOADED] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn override_an_override() { + Package::new("chrono", "0.2.0") + .dep("serde", "< 0.9") + .publish(); + Package::new("serde", "0.7.0") + .file("src/lib.rs", "pub fn serde07() {}") + .publish(); + Package::new("serde", "0.8.0") + .file("src/lib.rs", "pub fn serde08() {}") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + chrono = "0.2" + serde = "0.8" + + [replace] + "chrono:0.2.0" = { path = "chrono" } + "serde:0.8.0" = { path = "serde" } + "#, + ) + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.0.1" + dependencies = [ + "chrono 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "chrono" + version = "0.2.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + replace = "chrono 0.2.0" + + [[package]] + name = "chrono" + version = "0.2.0" + dependencies = [ + "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + ] + + [[package]] + name = "serde" + version = "0.7.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + + [[package]] + name = "serde" + version = "0.8.0" + source = "registry+https://github.com/rust-lang/crates.io-index" + replace = "serde 0.8.0" + + [[package]] + name = "serde" + version = "0.8.0" + "#, + ) + .file( + "src/lib.rs", + " + extern crate chrono; + extern crate serde; + + pub fn foo() { + chrono::chrono(); + serde::serde08_override(); + } + ", + ) + .file( + "chrono/Cargo.toml", + r#" + [package] + name = "chrono" + version = "0.2.0" + authors = [] + + [dependencies] + serde = "< 0.9" + "#, + ) + .file( + "chrono/src/lib.rs", + " + extern crate serde; + pub fn chrono() { + serde::serde07(); + } + ", + ) + .file("serde/Cargo.toml", &basic_manifest("serde", "0.8.0")) + .file("serde/src/lib.rs", "pub fn serde08_override() {}") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn overriding_nonexistent_no_spurious() { + Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); + Package::new("baz", "0.1.0").publish(); + + let bar = git::repo(&paths::root().join("override")) + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file("src/lib.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = {{ git = '{url}' }} + "baz:0.1.0" = {{ git = '{url}' }} + "#, + url = bar.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("build") + .with_stderr( + "\ +[WARNING] package replacement is not used: [..]baz:0.1.0 +[FINISHED] [..] 
+", + ) + .with_stdout("") + .run(); +} + +#[cargo_test] +fn no_warnings_when_replace_is_used_in_another_workspace_member() { + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = [ "first_crate", "second_crate"] + + [replace] + "bar:0.1.0" = { path = "local_bar" }"#, + ) + .file( + "first_crate/Cargo.toml", + r#" + [package] + name = "first_crate" + version = "0.1.0" + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("first_crate/src/lib.rs", "") + .file( + "second_crate/Cargo.toml", + &basic_manifest("second_crate", "0.1.0"), + ) + .file("second_crate/src/lib.rs", "") + .file("local_bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("local_bar/src/lib.rs", "") + .build(); + + p.cargo("build") + .cwd("first_crate") + .with_stdout("") + .with_stderr( + "\ +[UPDATING] `[..]` index +[COMPILING] bar v0.1.0 ([..]) +[COMPILING] first_crate v0.1.0 ([..]) +[FINISHED] [..]", + ) + .run(); + + p.cargo("build") + .cwd("second_crate") + .with_stdout("") + .with_stderr( + "\ +[COMPILING] second_crate v0.1.0 ([..]) +[FINISHED] [..]", + ) + .run(); +} + +#[cargo_test] +fn override_to_path_dep() { + Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file("bar/src/lib.rs", "") + .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("bar/baz/src/lib.rs", "") + .file(".cargo/config", r#"paths = ["bar"]"#) + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn replace_to_path_dep() { + Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [replace] + "bar:0.1.0" = { path = "bar" } + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file( + "bar/src/lib.rs", + "extern crate baz; pub fn bar() { baz::baz(); }", + ) + .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("bar/baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn paths_ok_with_optional() { + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { version = "0.1", optional = true } + "#, + ) + .file("bar/src/lib.rs", "") + .file( + "bar2/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { version = "0.1", optional = true } + "#, + ) + .file("bar2/src/lib.rs", "") + .file(".cargo/config", r#"paths = ["bar2"]"#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.1.0 ([..]bar2) +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn paths_add_optional_bad() { + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .file( + "bar2/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { version = "0.1", optional = true } + "#, + ) + .file("bar2/src/lib.rs", "") + .file(".cargo/config", r#"paths = ["bar2"]"#) + .build(); + + p.cargo("build") + .with_stderr_contains( + "\ +warning: path override for crate `bar` has altered the original list of +dependencies; the dependency on `baz` was either added or\ +", + ) + .run(); +} + +#[cargo_test] +fn override_with_default_feature() { + Package::new("another", "0.1.0").publish(); + Package::new("another", "0.1.1").dep("bar", "0.1").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar", default-features = false } + another = "0.1" + another2 = { path = "another2" } + + [replace] + 'bar:0.1.0' = { path = "bar" } + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() { bar::bar(); }") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [features] + default = [] + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[cfg(feature = "default")] + pub fn bar() {} + "#, + ) + .file( + "another2/Cargo.toml", + r#" + [package] + name = "another2" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", default-features = false } + "#, + ) + .file("another2/src/lib.rs", "") + .build(); + + p.cargo("run").run(); +} + +#[cargo_test] +fn override_plus_dep() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [replace] + 'bar:0.1.0' = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = ".." } + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains("error: cyclic package dependency: [..]") + .run(); +} diff --git a/tests/testsuite/package.rs b/tests/testsuite/package.rs new file mode 100644 index 00000000000..92be2b2eee2 --- /dev/null +++ b/tests/testsuite/package.rs @@ -0,0 +1,1344 @@ +use std; +use std::fs::File; +use std::io::prelude::*; +use std::path::Path; + +use crate::support::cargo_process; +use crate::support::paths::CargoPathExt; +use crate::support::registry::Package; +use crate::support::{ + basic_manifest, git, path2url, paths, project, publish::validate_crate_contents, registry, +}; +use git2; + +#[cargo_test] +fn simple() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .file("src/bar.txt", "") // should be ignored when packaging + .build(); + + p.cargo("package") + .with_stderr( + "\ +[WARNING] manifest has no documentation[..] +See [..] 
+[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); + p.cargo("package -l") + .with_stdout( + "\ +Cargo.lock +Cargo.toml +src/main.rs +", + ) + .run(); + p.cargo("package").with_stdout("").run(); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + validate_crate_contents( + f, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + &[], + ); +} + +#[cargo_test] +fn metadata_warning() { + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("package") + .with_stderr( + "\ +warning: manifest has no description, license, license-file, documentation, \ +homepage or repository. +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("package") + .with_stderr( + "\ +warning: manifest has no description, documentation, homepage or repository. +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("package") + .with_stderr( + "\ +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn package_verbose() { + let root = paths::root().join("all"); + let repo = git::repo(&root) + .file("Cargo.toml", &basic_manifest("foo", "0.0.1")) + .file("src/main.rs", "fn main() {}") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + cargo_process("build").cwd(repo.root()).run(); + + println!("package main repo"); + cargo_process("package -v --no-verify") + .cwd(repo.root()) + .with_stderr( + "\ +[WARNING] manifest has no description[..] +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] Cargo.toml +[ARCHIVING] src/main.rs +[ARCHIVING] .cargo_vcs_info.json +[ARCHIVING] Cargo.lock +", + ) + .run(); + + let f = File::open(&repo.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let vcs_contents = format!( + r#"{{ + "git": {{ + "sha1": "{}" + }} +}} +"#, + repo.revparse_head() + ); + validate_crate_contents( + f, + "foo-0.0.1.crate", + &[ + "Cargo.lock", + "Cargo.toml", + "Cargo.toml.orig", + "src/main.rs", + ".cargo_vcs_info.json", + ], + &[(".cargo_vcs_info.json", &vcs_contents)], + ); + + println!("package sub-repo"); + cargo_process("package -v --no-verify") + .cwd(repo.root().join("a")) + .with_stderr( + "\ +[WARNING] manifest has no description[..] 
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] a v0.0.1 ([..]) +[ARCHIVING] Cargo.toml +[ARCHIVING] src/lib.rs +[ARCHIVING] .cargo_vcs_info.json +", + ) + .run(); +} + +#[cargo_test] +fn package_verification() { + let p = project().file("src/main.rs", "fn main() {}").build(); + p.cargo("build").run(); + p.cargo("package") + .with_stderr( + "\ +[WARNING] manifest has no description[..] +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn vcs_file_collision() { + let p = project().build(); + let _ = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + description = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + documentation = "foo" + homepage = "foo" + repository = "foo" + exclude = ["*.no-existe"] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + "#, + ) + .file(".cargo_vcs_info.json", "foo") + .build(); + p.cargo("package") + .arg("--no-verify") + .with_status(101) + .with_stderr( + "\ +[ERROR] Invalid inclusion of reserved file name .cargo_vcs_info.json \ +in package source +", + ) + .run(); +} + +#[cargo_test] +fn path_dependency_no_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("package") + .with_status(101) + .with_stderr( + "\ +[WARNING] manifest has no documentation, homepage or repository. +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[ERROR] all path dependencies must have a version specified when packaging. +dependency `bar` does not specify a version. +", + ) + .run(); +} + +#[cargo_test] +fn exclude() { + let root = paths::root().join("exclude"); + let repo = git::repo(&root) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = [ + "*.txt", + # file in root + "file_root_1", # NO_CHANGE (ignored) + "/file_root_2", # CHANGING (packaged -> ignored) + "file_root_3/", # NO_CHANGE (packaged) + "file_root_4/*", # NO_CHANGE (packaged) + "file_root_5/**", # NO_CHANGE (packaged) + # file in sub-dir + "file_deep_1", # CHANGING (packaged -> ignored) + "/file_deep_2", # NO_CHANGE (packaged) + "file_deep_3/", # NO_CHANGE (packaged) + "file_deep_4/*", # NO_CHANGE (packaged) + "file_deep_5/**", # NO_CHANGE (packaged) + # dir in root + "dir_root_1", # CHANGING (packaged -> ignored) + "/dir_root_2", # CHANGING (packaged -> ignored) + "dir_root_3/", # CHANGING (packaged -> ignored) + "dir_root_4/*", # NO_CHANGE (ignored) + "dir_root_5/**", # NO_CHANGE (ignored) + # dir in sub-dir + "dir_deep_1", # CHANGING (packaged -> ignored) + "/dir_deep_2", # NO_CHANGE + "dir_deep_3/", # CHANGING (packaged -> ignored) + "dir_deep_4/*", # CHANGING (packaged -> ignored) + "dir_deep_5/**", # CHANGING (packaged -> ignored) + ] + "#, + ) + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .file("bar.txt", "") + .file("src/bar.txt", "") + // File in root. 
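+        // Each file and directory below pairs with one exclude pattern in
+        // the manifest above; the NO_CHANGE / CHANGING annotations record
+        // the expected "packaged vs. ignored" outcome, which the [ARCHIVING]
+        // list asserted below pins down.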
+ .file("file_root_1", "") + .file("file_root_2", "") + .file("file_root_3", "") + .file("file_root_4", "") + .file("file_root_5", "") + // File in sub-dir. + .file("some_dir/file_deep_1", "") + .file("some_dir/file_deep_2", "") + .file("some_dir/file_deep_3", "") + .file("some_dir/file_deep_4", "") + .file("some_dir/file_deep_5", "") + // Dir in root. + .file("dir_root_1/some_dir/file", "") + .file("dir_root_2/some_dir/file", "") + .file("dir_root_3/some_dir/file", "") + .file("dir_root_4/some_dir/file", "") + .file("dir_root_5/some_dir/file", "") + // Dir in sub-dir. + .file("some_dir/dir_deep_1/some_dir/file", "") + .file("some_dir/dir_deep_2/some_dir/file", "") + .file("some_dir/dir_deep_3/some_dir/file", "") + .file("some_dir/dir_deep_4/some_dir/file", "") + .file("some_dir/dir_deep_5/some_dir/file", "") + .build(); + + cargo_process("package --no-verify -v") + .cwd(repo.root()) + .with_stdout("") + .with_stderr( + "\ +[WARNING] manifest has no description[..] +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] Cargo.toml +[ARCHIVING] file_root_3 +[ARCHIVING] file_root_4 +[ARCHIVING] file_root_5 +[ARCHIVING] some_dir/dir_deep_2/some_dir/file +[ARCHIVING] some_dir/dir_deep_4/some_dir/file +[ARCHIVING] some_dir/dir_deep_5/some_dir/file +[ARCHIVING] some_dir/file_deep_2 +[ARCHIVING] some_dir/file_deep_3 +[ARCHIVING] some_dir/file_deep_4 +[ARCHIVING] some_dir/file_deep_5 +[ARCHIVING] src/main.rs +[ARCHIVING] .cargo_vcs_info.json +[ARCHIVING] Cargo.lock +", + ) + .run(); + + assert!(repo.root().join("target/package/foo-0.0.1.crate").is_file()); + + cargo_process("package -l") + .cwd(repo.root()) + .with_stdout( + "\ +.cargo_vcs_info.json +Cargo.lock +Cargo.toml +file_root_3 +file_root_4 +file_root_5 +some_dir/dir_deep_2/some_dir/file +some_dir/dir_deep_4/some_dir/file +some_dir/dir_deep_5/some_dir/file +some_dir/file_deep_2 +some_dir/file_deep_3 +some_dir/file_deep_4 +some_dir/file_deep_5 +src/main.rs +", + ) + .run(); +} + +#[cargo_test] +fn include() { + let root = paths::root().join("include"); + let repo = git::repo(&root) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + include = ["foo.txt", "**/*.rs", "Cargo.toml"] + "#, + ) + .file("foo.txt", "") + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + // Should be ignored when packaging. + .file("src/bar.txt", "") + .build(); + + cargo_process("package --no-verify -v") + .cwd(repo.root()) + .with_stderr( + "\ +[WARNING] manifest has no description[..] +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
+[WARNING] both package.include and package.exclude are specified; the exclude list will be ignored +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] Cargo.toml +[ARCHIVING] foo.txt +[ARCHIVING] src/main.rs +[ARCHIVING] .cargo_vcs_info.json +[ARCHIVING] Cargo.lock +", + ) + .run(); +} + +#[cargo_test] +fn package_lib_with_bin() { + let p = project() + .file("src/main.rs", "extern crate foo; fn main() {}") + .file("src/lib.rs", "") + .build(); + + p.cargo("package -v").run(); +} + +#[cargo_test] +fn package_git_submodule() { + let project = git::new("foo", |project| { + project + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = ["foo@example.com"] + license = "MIT" + description = "foo" + repository = "foo" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + }) + .unwrap(); + let library = git::new("bar", |library| { + library.no_manifest().file("Makefile", "all:") + }) + .unwrap(); + + let repository = git2::Repository::open(&project.root()).unwrap(); + let url = path2url(library.root()).to_string(); + git::add_submodule(&repository, &url, Path::new("bar")); + git::commit(&repository); + + let repository = git2::Repository::open(&project.root().join("bar")).unwrap(); + repository + .reset( + &repository.revparse_single("HEAD").unwrap(), + git2::ResetType::Hard, + None, + ) + .unwrap(); + + project + .cargo("package --no-verify -v") + .with_stderr_contains("[ARCHIVING] bar/Makefile") + .run(); +} + +#[cargo_test] +fn no_duplicates_from_modified_tracked_files() { + let root = paths::root().join("all"); + let p = git::repo(&root) + .file("Cargo.toml", &basic_manifest("foo", "0.0.1")) + .file("src/main.rs", "fn main() {}") + .build(); + File::create(p.root().join("src/main.rs")) + .unwrap() + .write_all(br#"fn main() { println!("A change!"); }"#) + .unwrap(); + cargo_process("build").cwd(p.root()).run(); + cargo_process("package --list --allow-dirty") + .cwd(p.root()) + .with_stdout( + "\ +Cargo.lock +Cargo.toml +src/main.rs +", + ) + .run(); +} + +#[cargo_test] +fn ignore_nested() { + let cargo_toml = r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#; + let main_rs = r#" + fn main() { println!("hello"); } + "#; + let p = project() + .file("Cargo.toml", cargo_toml) + .file("src/main.rs", main_rs) + // If a project happens to contain a copy of itself, we should + // ignore it. + .file("a_dir/foo/Cargo.toml", cargo_toml) + .file("a_dir/foo/src/main.rs", main_rs) + .build(); + + p.cargo("package") + .with_stderr( + "\ +[WARNING] manifest has no documentation[..] +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); + p.cargo("package -l") + .with_stdout( + "\ +Cargo.lock +Cargo.toml +src[..]main.rs +", + ) + .run(); + p.cargo("package").with_stdout("").run(); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + validate_crate_contents( + f, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + &[], + ); +} + +// Windows doesn't allow these characters in filenames. 
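+// (The test below uses `:` in a file name; Windows reserves `:` along with
+// `<>"/\|?*`, so the test only runs on Unix.)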
+#[cfg(unix)] +#[cargo_test] +fn package_weird_characters() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .file("src/:foo", "") + .build(); + + p.cargo("package") + .with_status(101) + .with_stderr( + "\ +warning: [..] +See [..] +[PACKAGING] foo [..] +[ERROR] failed to prepare local package for uploading + +Caused by: + cannot package a filename with a special character `:`: src/:foo +", + ) + .run(); +} + +#[cargo_test] +fn repackage_on_source_change() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + + p.cargo("package").run(); + + // Add another source file + let mut file = File::create(p.root().join("src").join("foo.rs")).unwrap_or_else(|e| { + panic!( + "could not create file {}: {}", + p.root().join("src/foo.rs").display(), + e + ) + }); + + file.write_all(br#"fn main() { println!("foo"); }"#) + .unwrap(); + std::mem::drop(file); + + // Check that cargo rebuilds the tarball + p.cargo("package") + .with_stderr( + "\ +[WARNING] [..] +See [..] +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + // Check that the tarball contains the added file + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + validate_crate_contents( + f, + "foo-0.0.1.crate", + &[ + "Cargo.lock", + "Cargo.toml", + "Cargo.toml.orig", + "src/main.rs", + "src/foo.rs", + ], + &[], + ); +} + +#[cargo_test] +#[cfg(unix)] +fn broken_symlink() { + use std::os::unix::fs; + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = 'foo' + documentation = 'foo' + homepage = 'foo' + repository = 'foo' + "#, + ) + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + t!(fs::symlink("nowhere", &p.root().join("src/foo.rs"))); + + p.cargo("package -v") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to prepare local package for uploading + +Caused by: + failed to open for archiving: `[..]foo.rs` + +Caused by: + [..] +", + ) + .run(); +} + +#[cargo_test] +fn do_not_package_if_repository_is_dirty() { + let p = project().build(); + + // Create a Git repository containing a minimal Rust project. + let _ = git::repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // Modify Cargo.toml without committing the change. 
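+    // `cargo package` checks the git working tree for uncommitted changes
+    // before archiving, so the un-staged edit below should make it abort
+    // unless `--allow-dirty` is passed.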
+ p.change_file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + # change + "#, + ); + + p.cargo("package") + .with_status(101) + .with_stderr( + "\ +error: 1 files in the working directory contain changes that were not yet \ +committed into git: + +Cargo.toml + +to proceed despite this and include the uncommited changes, pass the `--allow-dirty` flag +", + ) + .run(); +} + +#[cargo_test] +fn generated_manifest() { + Package::new("abc", "1.0.0").publish(); + Package::new("def", "1.0.0").alternative(true).publish(); + Package::new("ghi", "1.0.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + exclude = ["*.txt"] + license = "MIT" + description = "foo" + + [project.metadata] + foo = 'bar' + + [workspace] + + [dependencies] + bar = { path = "bar", version = "0.1" } + def = { version = "1.0", registry = "alternative" } + ghi = "1.0" + abc = "1.0" + "#, + ) + .file("src/main.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("package --no-verify").run(); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + let rewritten_toml = format!( + r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "foo" +version = "0.0.1" +authors = [] +exclude = ["*.txt"] +description = "foo" +license = "MIT" + +[package.metadata] +foo = "bar" +[dependencies.abc] +version = "1.0" + +[dependencies.bar] +version = "0.1" + +[dependencies.def] +version = "1.0" +registry-index = "{}" + +[dependencies.ghi] +version = "1.0" +"#, + registry::alt_registry_url() + ); + + validate_crate_contents( + f, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + &[("Cargo.toml", &rewritten_toml)], + ); +} + +#[cargo_test] +fn ignore_workspace_specifier() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + + authors = [] + + [workspace] + + [dependencies] + bar = { path = "bar", version = "0.1" } + "#, + ) + .file("src/main.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("package --no-verify").cwd("bar").run(); + + let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap(); + let rewritten_toml = r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies +# +# If you believe there's an error in this file please file an +# issue against the rust-lang/cargo repository. 
If you're +# editing this file be aware that the upstream Cargo.toml +# will likely look very different (and much more reasonable) + +[package] +name = "bar" +version = "0.1.0" +authors = [] +"#; + validate_crate_contents( + f, + "bar-0.1.0.crate", + &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], + &[("Cargo.toml", rewritten_toml)], + ); +} + +#[cargo_test] +fn package_two_kinds_of_deps() { + Package::new("other", "1.0.0").publish(); + Package::new("other1", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + other = "1.0" + other1 = { version = "1.0" } + "#, + ) + .file("src/main.rs", "") + .build(); + + p.cargo("package --no-verify").run(); +} + +#[cargo_test] +fn test_edition() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["edition"] + [package] + name = "foo" + version = "0.0.1" + authors = [] + edition = "2018" + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("build -v") + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..]--edition=2018 [..] +", + ) + .run(); +} + +#[cargo_test] +fn edition_with_metadata() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + edition = "2018" + + [package.metadata.docs.rs] + features = ["foobar"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("package").run(); +} + +#[cargo_test] +fn test_edition_malformed() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + edition = "chicken" + "#, + ) + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + failed to parse the `edition` key + +Caused by: + supported edition values are `2015` or `2018`, but `chicken` is unknown +" + .to_string(), + ) + .run(); +} + +#[cargo_test] +fn do_not_package_if_src_was_modified() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .file("dir/foo.txt", "") + .file("bar.txt", "") + .file( + "build.rs", + r#" + use std::fs; + + fn main() { + fs::write("src/generated.txt", + "Hello, world of generated files." + ).expect("failed to create file"); + fs::remove_file("dir/foo.txt").expect("failed to remove file"); + fs::remove_dir("dir").expect("failed to remove dir"); + fs::write("bar.txt", "updated content").expect("failed to update"); + fs::create_dir("new-dir").expect("failed to create dir"); + } + "#, + ) + .build(); + + p.cargo("package") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to verify package tarball + +Caused by: + Source directory was modified by build.rs during cargo publish. \ +Build scripts should not modify anything outside of OUT_DIR. 
+Changed: [CWD]/target/package/foo-0.0.1/bar.txt +Added: [CWD]/target/package/foo-0.0.1/new-dir +[CWD]/target/package/foo-0.0.1/src/generated.txt +Removed: [CWD]/target/package/foo-0.0.1/dir +[CWD]/target/package/foo-0.0.1/dir/foo.txt + +To proceed despite this, pass the `--no-verify` flag.", + ) + .run(); + + p.cargo("package --no-verify").run(); +} + +#[cargo_test] +fn package_with_select_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [features] + required = [] + optional = [] + "#, + ) + .file( + "src/main.rs", + "#[cfg(not(feature = \"required\"))] + compile_error!(\"This crate requires `required` feature!\"); + fn main() {}", + ) + .build(); + + p.cargo("package --features required").run(); +} + +#[cargo_test] +fn package_with_all_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [features] + required = [] + optional = [] + "#, + ) + .file( + "src/main.rs", + "#[cfg(not(feature = \"required\"))] + compile_error!(\"This crate requires `required` feature!\"); + fn main() {}", + ) + .build(); + + p.cargo("package --all-features").run(); +} + +#[cargo_test] +fn package_no_default_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [features] + default = ["required"] + required = [] + "#, + ) + .file( + "src/main.rs", + "#[cfg(not(feature = \"required\"))] + compile_error!(\"This crate requires `required` feature!\"); + fn main() {}", + ) + .build(); + + p.cargo("package --no-default-features") + .with_stderr_contains("error: This crate requires `required` feature!") + .with_status(101) + .run(); +} + +#[cargo_test] +fn include_cargo_toml_implicit() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + include = ["src/lib.rs"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("package --list") + .with_stdout("Cargo.toml\nsrc/lib.rs\n") + .run(); +} + +fn include_exclude_test(include: &str, exclude: &str, files: &[&str], expected: &str) { + let mut pb = project().file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + include = {} + exclude = {} + "#, + include, exclude + ), + ); + for file in files { + pb = pb.file(file, ""); + } + let p = pb.build(); + + p.cargo("package --list") + .with_stderr("") + .with_stdout(expected) + .run(); + p.root().rm_rf(); +} + +#[cargo_test] +fn package_include_ignore_only() { + // Test with a gitignore pattern that fails to parse with glob. + // This is a somewhat nonsense pattern, but is an example of something git + // allows and glob does not. 
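+    // In gitignore syntax, a `**` not bracketed by `/` degrades to ordinary
+    // `*`s, so `src/abc**` behaves like `src/abc*`: it matches `abc1.rs`,
+    // `abc2.rs`, and the `abc` directory itself (hence `abc/mod.rs` in the
+    // expected list below).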
+ assert!(glob::Pattern::new("src/abc**").is_err()); + + include_exclude_test( + r#"["Cargo.toml", "src/abc**", "src/lib.rs"]"#, + "[]", + &["src/lib.rs", "src/abc1.rs", "src/abc2.rs", "src/abc/mod.rs"], + "Cargo.toml\n\ + src/abc/mod.rs\n\ + src/abc1.rs\n\ + src/abc2.rs\n\ + src/lib.rs\n\ + ", + ) +} + +#[cargo_test] +fn gitignore_patterns() { + include_exclude_test( + r#"["Cargo.toml", "foo"]"#, // include + "[]", + &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"], + "Cargo.toml\n\ + a/b/foo\n\ + a/foo\n\ + foo\n\ + x/foo/y\n\ + ", + ); + + include_exclude_test( + r#"["Cargo.toml", "/foo"]"#, // include + "[]", + &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"], + "Cargo.toml\n\ + foo\n\ + ", + ); + + include_exclude_test( + "[]", + r#"["foo/"]"#, // exclude + &["src/lib.rs", "foo", "a/foo", "x/foo/y", "bar"], + "Cargo.toml\n\ + a/foo\n\ + bar\n\ + foo\n\ + src/lib.rs\n\ + ", + ); + + include_exclude_test( + "[]", + r#"["*.txt", "[ab]", "[x-z]"]"#, // exclude + &[ + "src/lib.rs", + "foo.txt", + "bar/foo.txt", + "other", + "a", + "b", + "c", + "x", + "y", + "z", + ], + "Cargo.toml\n\ + c\n\ + other\n\ + src/lib.rs\n\ + ", + ); + + include_exclude_test( + r#"["Cargo.toml", "**/foo/bar"]"#, // include + "[]", + &["src/lib.rs", "a/foo/bar", "foo", "bar"], + "Cargo.toml\n\ + a/foo/bar\n\ + ", + ); + + include_exclude_test( + r#"["Cargo.toml", "foo/**"]"#, // include + "[]", + &["src/lib.rs", "a/foo/bar", "foo/x/y/z"], + "Cargo.toml\n\ + foo/x/y/z\n\ + ", + ); + + include_exclude_test( + r#"["Cargo.toml", "a/**/b"]"#, // include + "[]", + &["src/lib.rs", "a/b", "a/x/b", "a/x/y/b"], + "Cargo.toml\n\ + a/b\n\ + a/x/b\n\ + a/x/y/b\n\ + ", + ); +} + +#[cargo_test] +fn gitignore_negate() { + include_exclude_test( + r#"["Cargo.toml", "*.rs", "!foo.rs", "\\!important"]"#, // include + "[]", + &["src/lib.rs", "foo.rs", "!important"], + "!important\n\ + Cargo.toml\n\ + src/lib.rs\n\ + ", + ); + + // NOTE: This is unusual compared to git. Git treats `src/` as a + // short-circuit which means rules like `!src/foo.rs` would never run. + // However, because Cargo only works by iterating over *files*, it doesn't + // short-circuit. 
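+    // The first case below relies on that: even though `src/` matches the
+    // whole directory, the later `!src/foo.rs` negation still applies, so
+    // `src/foo.rs` is absent from the expected list.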
+ include_exclude_test( + r#"["Cargo.toml", "src/", "!src/foo.rs"]"#, // include + "[]", + &["src/lib.rs", "src/foo.rs"], + "Cargo.toml\n\ + src/lib.rs\n\ + ", + ); + + include_exclude_test( + r#"["Cargo.toml", "src/*.rs", "!foo.rs"]"#, // include + "[]", + &["src/lib.rs", "foo.rs", "src/foo.rs", "src/bar/foo.rs"], + "Cargo.toml\n\ + src/lib.rs\n\ + ", + ); + + include_exclude_test( + "[]", + r#"["*.rs", "!foo.rs", "\\!important"]"#, // exclude + &["src/lib.rs", "foo.rs", "!important"], + "Cargo.toml\n\ + foo.rs\n\ + ", + ); +} diff --git a/tests/testsuite/patch.rs b/tests/testsuite/patch.rs new file mode 100644 index 00000000000..e3d699cc812 --- /dev/null +++ b/tests/testsuite/patch.rs @@ -0,0 +1,1136 @@ +use std::fs::{self, File}; +use std::io::{Read, Write}; + +use crate::support::git; +use crate::support::paths; +use crate::support::registry::Package; +use crate::support::{basic_manifest, project}; +use toml; + +#[cargo_test] +fn replace() { + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0") + .file( + "src/lib.rs", + "extern crate bar; pub fn baz() { bar::bar(); }", + ) + .dep("bar", "0.1.0") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + baz = "0.1.0" + + [patch.crates-io] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate bar; + extern crate baz; + pub fn bar() { + bar::bar(); + baz::baz(); + } + ", + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] baz v0.1.0 ([..]) +[COMPILING] bar v0.1.0 ([CWD]/bar) +[COMPILING] baz v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn nonexistent() { + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [patch.crates-io] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[COMPILING] bar v0.1.0 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn patch_git() { + let bar = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = {{ git = '{}' }} + + [patch.'{0}'] + bar = {{ path = "bar" }} + "#, + bar.url() + ), + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[COMPILING] bar v0.1.0 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn patch_to_git() { + let bar = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("src/lib.rs", "pub fn bar() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [patch.crates-io] + bar = {{ git = '{}' }} + "#, + bar.url() + ), + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::bar(); }", + ) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[UPDATING] `[ROOT][..]` index +[COMPILING] bar v0.1.0 (file://[..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn unused() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [patch.crates-io] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) + .file("bar/src/lib.rs", "not rust code") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. +[..] +[..] +[..] +[..] +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 [..] +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.cargo("build") + .with_stderr( + "\ +[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. +[..] +[..] +[..] +[..] +[FINISHED] [..] 
+", + ) + .run(); + + // unused patch should be in the lock file + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock) + .unwrap(); + let toml: toml::Value = toml::from_str(&lock).unwrap(); + assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); + assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar")); + assert_eq!( + toml["patch"]["unused"][0]["version"].as_str(), + Some("0.2.0") + ); +} + +#[cargo_test] +fn unused_git() { + Package::new("bar", "0.1.0").publish(); + + let foo = git::repo(&paths::root().join("override")) + .file("Cargo.toml", &basic_manifest("bar", "0.2.0")) + .file("src/lib.rs", "") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [patch.crates-io] + bar = {{ git = '{}' }} + "#, + foo.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[UPDATING] `[ROOT][..]` index +[WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph. +[..] +[..] +[..] +[..] +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 [..] +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.cargo("build") + .with_stderr( + "\ +[WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph. +[..] +[..] +[..] +[..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn add_patch() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 [..] +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [patch.crates-io] + bar = { path = 'bar' } + "# + )); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.1.0 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn add_ignored_patch() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) + .file("bar/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 [..] +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [patch.crates-io] + bar = { path = 'bar' } + "# + )); + + p.cargo("build") + .with_stderr( + "\ +[WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph. +[..] +[..] +[..] +[..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); + p.cargo("build") + .with_stderr( + "\ +[WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph. +[..] +[..] +[..] +[..] +[FINISHED] [..]", + ) + .run(); + + p.cargo("update").run(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.1.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [..] +", + ) + .run(); +} + +#[cargo_test] +fn no_warn_ws_patch() { + Package::new("c", "0.1.0").publish(); + + // Don't issue an unused patch warning when the patch isn't used when + // partially building a workspace. + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b", "c"] + + [patch.crates-io] + c = { path = "c" } + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.1.0" + [dependencies] + c = "0.1.0" + "#, + ) + .file("b/src/lib.rs", "") + .file("c/Cargo.toml", &basic_manifest("c", "0.1.0")) + .file("c/src/lib.rs", "") + .build(); + + p.cargo("build -p a") + .with_stderr( + "\ +[UPDATING] [..] +[COMPILING] a [..] +[FINISHED] [..]", + ) + .run(); +} + +#[cargo_test] +fn new_minor() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + + [patch.crates-io] + bar = { path = 'bar' } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) + .file("bar/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[COMPILING] bar v0.1.1 [..] +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn transitive_new_minor() { + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = 'bar' } + + [patch.crates-io] + baz = { path = 'baz' } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = '0.1.0' + "#, + ) + .file("bar/src/lib.rs", r#""#) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1")) + .file("baz/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[COMPILING] baz v0.1.1 [..] +[COMPILING] bar v0.1.0 [..] +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn new_major() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.2.0" + + [patch.crates-io] + bar = { path = 'bar' } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) + .file("bar/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[COMPILING] bar v0.2.0 [..] +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + Package::new("bar", "0.2.0").publish(); + p.cargo("update").run(); + p.cargo("build") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.2.0" + "# + )); + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.2.0 [..] +[COMPILING] bar v0.2.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn transitive_new_major() { + Package::new("baz", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = 'bar' } + + [patch.crates-io] + baz = { path = 'baz' } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = '0.2.0' + "#, + ) + .file("bar/src/lib.rs", r#""#) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.2.0")) + .file("baz/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[ROOT][..]` index +[COMPILING] baz v0.2.0 [..] +[COMPILING] bar v0.1.0 [..] +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn remove_patch() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [patch.crates-io] + foo = { path = 'foo' } + bar = { path = 'bar' } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", r#""#) + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", r#""#) + .build(); + + // Generate a lock file where `foo` is unused + p.cargo("build").run(); + let mut lock_file1 = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock_file1) + .unwrap(); + + // Remove `foo` and generate a new lock file form the old one + File::create(p.root().join("Cargo.toml")) + .unwrap() + .write_all( + br#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1" + + [patch.crates-io] + bar = { path = 'bar' } + "#, + ) + .unwrap(); + p.cargo("build").run(); + let mut lock_file2 = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock_file2) + .unwrap(); + + // Remove the lock file and build from scratch + fs::remove_file(p.root().join("Cargo.lock")).unwrap(); + p.cargo("build").run(); + let mut lock_file3 = String::new(); + File::open(p.root().join("Cargo.lock")) + .unwrap() + .read_to_string(&mut lock_file3) + .unwrap(); + + assert!(lock_file1.contains("foo")); + assert_eq!(lock_file2, lock_file3); + assert_ne!(lock_file1, lock_file2); +} + +#[cargo_test] +fn non_crates_io() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [patch.some-other-source] + bar = { path = 'bar' } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + [patch] entry `some-other-source` should be a URL or registry name + +Caused by: + invalid url `some-other-source`: relative URL without a base +", + ) + .run(); +} + +#[cargo_test] +fn replace_with_crates_io() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [patch.crates-io] + bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", r#""#) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] 
+error: failed to resolve patches for `[..]` + +Caused by: + patch for `bar` in `[..]` points to the same source, but patches must point \ + to different sources +", + ) + .run(); +} + +#[cargo_test] +fn patch_in_virtual() { + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + + [patch.crates-io] + bar = { path = "bar" } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", r#""#) + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "0.1" + "#, + ) + .file("foo/src/lib.rs", r#""#) + .build(); + + p.cargo("build").run(); + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn patch_depends_on_another_patch() { + Package::new("bar", "0.1.0") + .file("src/lib.rs", "broken code") + .publish(); + + Package::new("baz", "0.1.0") + .dep("bar", "0.1") + .file("src/lib.rs", "broken code") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.1.0" + + [dependencies] + bar = "0.1" + baz = "0.1" + + [patch.crates-io] + bar = { path = "bar" } + baz = { path = "baz" } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) + .file("bar/src/lib.rs", r#""#) + .file( + "baz/Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.1" + authors = [] + + [dependencies] + bar = "0.1" + "#, + ) + .file("baz/src/lib.rs", r#""#) + .build(); + + p.cargo("build").run(); + + // Nothing should be rebuilt, no registry should be updated. + p.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn replace_prerelease() { + Package::new("baz", "1.1.0-pre.1").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + + [patch.crates-io] + baz = { path = "./baz" } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = "1.1.0-pre.1" + "#, + ) + .file( + "bar/src/main.rs", + "extern crate baz; fn main() { baz::baz() }", + ) + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "1.1.0-pre.1" + authors = [] + [workspace] + "#, + ) + .file("baz/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn patch_older() { + Package::new("baz", "1.0.2").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = 'bar' } + baz = "=1.0.1" + + [patch.crates-io] + baz = { path = "./baz" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = "1.0.0" + "#, + ) + .file("bar/src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "1.0.1" + authors = [] + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] [..] +[COMPILING] baz v1.0.1 [..] +[COMPILING] bar v0.5.0 [..] +[COMPILING] foo v0.1.0 [..] +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn cycle() { + Package::new("a", "1.0.0").publish(); + Package::new("b", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + + [patch.crates-io] + a = {path="a"} + b = {path="b"} + "#, + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "1.0.0" + + [dependencies] + b = "1.0" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "1.0.0" + + [dependencies] + a = "1.0" + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("check") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] +error: cyclic package dependency: [..] +package `[..]` + ... which is depended on by `[..]` +", + ) + .run(); +} diff --git a/tests/testsuite/path.rs b/tests/testsuite/path.rs new file mode 100644 index 00000000000..226668ceb63 --- /dev/null +++ b/tests/testsuite/path.rs @@ -0,0 +1,1046 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use crate::support::paths::{self, CargoPathExt}; +use crate::support::registry::Package; +use crate::support::sleep_ms; +use crate::support::{basic_lib_manifest, basic_manifest, main_file, project}; + +#[cargo_test] +// I have no idea why this is failing spuriously on Windows; +// for more info, see #3466. +#[cfg(not(windows))] +fn cargo_compile_with_nested_deps_shorthand() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.baz] + + version = "0.5.0" + path = "baz" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + extern crate baz; + + pub fn gimme() -> String { + baz::gimme() + } + "#, + ) + .file("bar/baz/Cargo.toml", &basic_lib_manifest("baz")) + .file( + "bar/baz/src/baz.rs", + r#" + pub fn gimme() -> String { + "test passed".to_string() + } + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\ + [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("test passed\n").run(); + + println!("cleaning"); + p.cargo("clean -v").with_stdout("").run(); + println!("building baz"); + p.cargo("build -p baz") + .with_stderr( + "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + println!("building foo"); + p.cargo("build -p foo") + .with_stderr( + "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn cargo_compile_with_root_dev_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + + version = "0.5.0" + path = "../bar" + + [[bin]] + name = "foo" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .build(); + let _p2 = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> 
&'static str { + "zoidberg" + } + "#, + ) + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]can't find crate for `bar`") + .run(); +} + +#[cargo_test] +fn cargo_compile_with_root_dev_deps_with_testing() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.bar] + + version = "0.5.0" + path = "../bar" + + [[bin]] + name = "foo" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .build(); + let _p2 = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file( + "src/lib.rs", + r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#, + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] [..] v0.5.0 ([..]) +[COMPILING] [..] v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn cargo_compile_with_transitive_dev_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dev-dependencies.baz] + + git = "git://example.com/path/to/nowhere" + + [lib] + + name = "bar" + "#, + ) + .file( + "bar/src/bar.rs", + r#" + pub fn gimme() -> &'static str { + "zoidberg" + } + "#, + ) + .build(); + + p.cargo("build") + .with_stderr( + "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + ) + .run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("zoidberg\n").run(); +} + +#[cargo_test] +fn no_rebuild_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/bar.rs", "pub fn bar() {}") + .build(); + // First time around we should compile both foo and bar + p.cargo("build") + .with_stderr( + "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + + sleep_ms(1000); + p.change_file( + "src/main.rs", + r#" + extern crate bar; + fn main() { bar::bar(); } + "#, + ); + // Don't compile bar, but do recompile foo. 
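+    // Only `foo`'s source changed, so its fingerprint alone is stale; the
+    // expected stderr below has a single [COMPILING] line and none for `bar`.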
+ p.cargo("build") + .with_stderr( + "[COMPILING] foo v0.5.0 ([..])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn deep_dependencies_trigger_rebuild() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/bar.rs", + "extern crate baz; pub fn bar() { baz::baz() }", + ) + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/baz.rs", "pub fn baz() {}") + .build(); + p.cargo("build") + .with_stderr( + "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ + [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + p.cargo("build").with_stdout("").run(); + + // Make sure an update to baz triggers a rebuild of bar + // + // We base recompilation off mtime, so sleep for at least a second to ensure + // that this write will change the mtime. + sleep_ms(1000); + File::create(&p.root().join("baz/src/baz.rs")) + .unwrap() + .write_all(br#"pub fn baz() { println!("hello!"); }"#) + .unwrap(); + sleep_ms(1000); + p.cargo("build") + .with_stderr( + "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ + [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + + // Make sure an update to bar doesn't trigger baz + sleep_ms(1000); + File::create(&p.root().join("bar/src/bar.rs")) + .unwrap() + .write_all( + br#" + extern crate baz; + pub fn bar() { println!("hello!"); baz::baz(); } + "#, + ) + .unwrap(); + sleep_ms(1000); + p.cargo("build") + .with_stderr( + "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn no_rebuild_two_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = "bar" + [dependencies.baz] + path = "baz" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + name = "bar" + [dependencies.baz] + path = "../baz" + "#, + ) + .file("bar/src/bar.rs", "pub fn bar() {}") + .file("baz/Cargo.toml", &basic_lib_manifest("baz")) + .file("baz/src/baz.rs", "pub fn baz() {}") + .build(); + p.cargo("build") + .with_stderr( + "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ + [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + assert!(p.bin("foo").is_file()); + p.cargo("build").with_stdout("").run(); + assert!(p.bin("foo").is_file()); +} + +#[cargo_test] +fn nested_deps_recompile() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "src/bar" + "#, + ) + .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + 
.file("src/bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 92 }") + .build(); + + p.cargo("build") + .with_stderr( + "[COMPILING] bar v0.5.0 ([CWD]/src/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + sleep_ms(1000); + + File::create(&p.root().join("src/main.rs")) + .unwrap() + .write_all(br#"fn main() {}"#) + .unwrap(); + + // This shouldn't recompile `bar` + p.cargo("build") + .with_stderr( + "[COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); +} + +#[cargo_test] +fn error_message_for_missing_manifest() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + path = "src/bar" + "#, + ) + .file("src/lib.rs", "") + .file("src/bar/not-a-manifest", "") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to load source for a dependency on `bar` + +Caused by: + Unable to update [CWD]/src/bar + +Caused by: + failed to read `[..]bar/Cargo.toml` + +Caused by: + [..] (os error [..]) +", + ) + .run(); +} + +#[cargo_test] +fn override_relative() { + let bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "") + .build(); + + fs::create_dir(&paths::root().join(".cargo")).unwrap(); + File::create(&paths::root().join(".cargo/config")) + .unwrap() + .write_all(br#"paths = ["bar"]"#) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = '{}' + "#, + bar.root().display() + ), + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v").run(); +} + +#[cargo_test] +fn override_self() { + let bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("src/lib.rs", "") + .build(); + + let p = project(); + let root = p.root(); + let p = p + .file(".cargo/config", &format!("paths = ['{}']", root.display())) + .file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + path = '{}' + + "#, + bar.root().display() + ), + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn override_path_dep() { + let bar = project() + .at("bar") + .file( + "p1/Cargo.toml", + r#" + [package] + name = "p1" + version = "0.5.0" + authors = [] + + [dependencies.p2] + path = "../p2" + "#, + ) + .file("p1/src/lib.rs", "") + .file("p2/Cargo.toml", &basic_manifest("p2", "0.5.0")) + .file("p2/src/lib.rs", "") + .build(); + + let p = project() + .file( + ".cargo/config", + &format!( + "paths = ['{}', '{}']", + bar.root().join("p1").display(), + bar.root().join("p2").display() + ), + ) + .file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.p2] + path = '{}' + + "#, + bar.root().join("p2").display() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn path_dep_build_cmd() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + + name = "foo" + version = "0.5.0" + authors = ["wycats@example.com"] + + [dependencies.bar] + + version = "0.5.0" + path = "bar" + "#, + ) + 
.file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) + .file( + "bar/Cargo.toml", + r#" + [project] + + name = "bar" + version = "0.5.0" + authors = ["wycats@example.com"] + build = "build.rs" + + [lib] + name = "bar" + path = "src/bar.rs" + "#, + ) + .file( + "bar/build.rs", + r#" + use std::fs; + fn main() { + fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); + } + "#, + ) + .file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 0 }") + .build(); + p.root().join("bar").move_into_the_past(); + + p.cargo("build") + .with_stderr( + "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + ) + .run(); + + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("0\n").run(); + + // Touching bar.rs.in should cause the `build` command to run again. + { + let file = fs::File::create(&p.root().join("bar/src/bar.rs.in")); + file.unwrap() + .write_all(br#"pub fn gimme() -> i32 { 1 }"#) + .unwrap(); + } + + p.cargo("build") + .with_stderr( + "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ + [COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) in \ + [..]\n", + ) + .run(); + + p.process(&p.bin("foo")).with_stdout("1\n").run(); +} + +#[cargo_test] +fn dev_deps_no_rebuild_lib() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dev-dependencies.bar] + path = "bar" + + [lib] + name = "foo" + doctest = false + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(test)] #[allow(unused_extern_crates)] extern crate bar; + #[cfg(not(test))] pub fn foo() { env!("FOO"); } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + p.cargo("build") + .env("FOO", "bar") + .with_stderr( + "[COMPILING] foo v0.5.0 ([CWD])\n\ + [FINISHED] dev [unoptimized + debuginfo] target(s) \ + in [..]\n", + ) + .run(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] [..] v0.5.0 ([CWD][..]) +[COMPILING] [..] v0.5.0 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn custom_target_no_rebuild() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "a" } + [workspace] + members = ["a", "b"] + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a = { path = "../a" } + "#, + ) + .file("b/src/lib.rs", "") + .build(); + p.cargo("build") + .with_stderr( + "\ +[COMPILING] a v0.5.0 ([..]) +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + t!(fs::rename( + p.root().join("target"), + p.root().join("target_moved") + )); + p.cargo("build --manifest-path=b/Cargo.toml") + .env("CARGO_TARGET_DIR", "target_moved") + .with_stderr( + "\ +[COMPILING] b v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn override_and_depend() { + let p = project() + .no_manifest() + .file( + "a/a1/Cargo.toml", + r#" + [project] + name = "a1" + version = "0.5.0" + authors = [] + [dependencies] + a2 = { path = "../a2" } + "#, + ) + .file("a/a1/src/lib.rs", "") + .file("a/a2/Cargo.toml", &basic_manifest("a2", "0.5.0")) + .file("a/a2/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [project] + name = "b" + version = "0.5.0" + authors = [] + [dependencies] + a1 = { path = "../a/a1" } + a2 = { path = "../a/a2" } + "#, + ) + .file("b/src/lib.rs", "") + .file("b/.cargo/config", r#"paths = ["../a"]"#) + .build(); + p.cargo("build") + .cwd("b") + .with_stderr( + "\ +[COMPILING] a2 v0.5.0 ([..]) +[COMPILING] a1 v0.5.0 ([..]) +[COMPILING] b v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn missing_path_dependency() { + let p = project() + .file("Cargo.toml", &basic_manifest("a", "0.5.0")) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#"paths = ["../whoa-this-does-not-exist"]"#, + ) + .build(); + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \ +(defined in `[..]`) + +Caused by: + failed to read directory `[..]` + +Caused by: + [..] (os error [..]) +", + ) + .run(); +} + +#[cargo_test] +fn invalid_path_dep_in_workspace_with_lockfile() { + Package::new("bar", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [workspace] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + // Generate a lock file + p.cargo("build").run(); + + // Change the dependency on `bar` to an invalid path + File::create(&p.root().join("foo/Cargo.toml")) + .unwrap() + .write_all( + br#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = { path = "" } + "#, + ) + .unwrap(); + + // Make sure we get a nice error. In the past this actually stack + // overflowed! + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: no matching package named `bar` found +location searched: [..] +perhaps you meant: foo +required by package `foo v0.5.0 ([..])` +", + ) + .run(); +} + +#[cargo_test] +fn workspace_produces_rlib() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [workspace] + + [dependencies] + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", &basic_manifest("foo", "0.5.0")) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + assert!(p.root().join("target/debug/libtop.rlib").is_file()); + assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); +} + +#[cargo_test] +fn thin_lto_works() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "top" + version = "0.5.0" + authors = [] + + [profile.release] + lto = 'thin' + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --release -v") + .with_stderr( + "\ +[COMPILING] top [..] +[RUNNING] `rustc [..] -C lto=thin [..]` +[FINISHED] [..] 
+", + ) + .run(); +} diff --git a/tests/testsuite/plugins.rs b/tests/testsuite/plugins.rs new file mode 100644 index 00000000000..1d2ca456da0 --- /dev/null +++ b/tests/testsuite/plugins.rs @@ -0,0 +1,438 @@ +use crate::support::{basic_manifest, project}; +use crate::support::{is_nightly, rustc_host}; + +#[cargo_test] +fn plugin_to_the_max() { + if !is_nightly() { + // plugins are unstable + return; + } + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo_lib" + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + extern crate foo_lib; + + fn main() { foo_lib::foo(); } + "#, + ) + .file( + "src/foo_lib.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + + pub fn foo() {} + "#, + ) + .build(); + let _bar = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + + [dependencies.baz] + path = "../baz" + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + + extern crate rustc_plugin; + extern crate baz; + + use rustc_plugin::Registry; + + #[plugin_registrar] + pub fn foo(_reg: &mut Registry) { + println!("{}", baz::baz()); + } + "#, + ) + .build(); + let _baz = project() + .at("baz") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.0.1" + authors = [] + + [lib] + name = "baz" + crate_type = ["dylib"] + "#, + ) + .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") + .build(); + + foo.cargo("build").run(); + foo.cargo("doc").run(); +} + +#[cargo_test] +fn plugin_with_dynamic_native_dependency() { + if !is_nightly() { + // plugins are unstable + return; + } + + let build = project() + .at("builder") + .file( + "Cargo.toml", + r#" + [package] + name = "builder" + version = "0.0.1" + authors = [] + + [lib] + name = "builder" + crate-type = ["dylib"] + "#, + ) + .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}") + .build(); + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#" + #![feature(plugin)] + #![plugin(bar)] + + fn main() {} + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = 'build.rs' + + [lib] + name = "bar" + plugin = true + "#, + ) + .file( + "bar/build.rs", + r#" + use std::env; + use std::fs; + use std::path::PathBuf; + + fn main() { + let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); + let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap()); + let file = format!("{}builder{}", + env::consts::DLL_PREFIX, + env::consts::DLL_SUFFIX); + let src = root.join(&file); + let dst = out_dir.join(&file); + fs::copy(src, dst).unwrap(); + if cfg!(windows) { + fs::copy(root.join("builder.dll.lib"), + out_dir.join("builder.dll.lib")).unwrap(); + } + println!("cargo:rustc-flags=-L {}", out_dir.display()); + } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + extern crate rustc_plugin; + + use rustc_plugin::Registry; + + #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] + #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] + extern { fn foo(); } + + #[plugin_registrar] + pub fn bar(_reg: &mut Registry) { + unsafe { foo() } + } + "#, + ) + .build(); + + build.cargo("build").run(); + + let root = 
build.root().join("target").join("debug"); + foo.cargo("build -v").env("BUILDER_ROOT", root).run(); +} + +#[cargo_test] +fn plugin_integration() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + + [lib] + name = "foo" + plugin = true + doctest = false + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .file("tests/it_works.rs", "") + .build(); + + p.cargo("test -v").run(); +} + +#[cargo_test] +fn doctest_a_plugin() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "#[macro_use] extern crate bar;") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + plugin = true + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("test -v").run(); +} + +// See #1515 +#[cargo_test] +fn native_plugin_dependency_with_custom_ar_linker() { + let target = rustc_host(); + + let _foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + "#, + ) + .file("src/lib.rs", "") + .build(); + + let bar = project() + .at("bar") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = "../foo" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + ar = "nonexistent-ar" + linker = "nonexistent-linker" + "#, + target + ), + ) + .build(); + + bar.cargo("build --verbose") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` +[ERROR] [..]linker[..] 
+", + ) + .run(); +} + +#[cargo_test] +fn panic_abort_plugins() { + if !is_nightly() { + // requires rustc_private + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #![feature(rustc_private)] + extern crate syntax; + "#, + ) + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn shared_panic_abort_plugins() { + if !is_nightly() { + // requires rustc_private + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.dev] + panic = 'abort' + + [dependencies] + bar = { path = "bar" } + baz = { path = "baz" } + "#, + ) + .file("src/lib.rs", "extern crate baz;") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + + [dependencies] + baz = { path = "../baz" } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #![feature(rustc_private)] + extern crate syntax; + extern crate baz; + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) + .file("baz/src/lib.rs", "") + .build(); + + p.cargo("build").run(); +} diff --git a/tests/testsuite/proc_macro.rs b/tests/testsuite/proc_macro.rs new file mode 100644 index 00000000000..05fe15450f0 --- /dev/null +++ b/tests/testsuite/proc_macro.rs @@ -0,0 +1,440 @@ +use crate::support::is_nightly; +use crate::support::project; + +#[cargo_test] +fn probe_cfg_before_crate_type_discovery() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [target.'cfg(not(stage300))'.dependencies.noop] + path = "../noop" + "#, + ) + .file( + "src/main.rs", + r#" + #[macro_use] + extern crate noop; + + #[derive(Noop)] + struct X; + + fn main() {} + "#, + ) + .build(); + let _noop = project() + .at("noop") + .file( + "Cargo.toml", + r#" + [package] + name = "noop" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn noop() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.noop] + path = "../noop" + "#, + ) + .file( + "src/main.rs", + r#" + #[macro_use] + extern crate noop; + + #[derive(Noop)] + struct X; + + fn main() {} + "#, + ) + .build(); + let _noop = project() + .at("noop") + .file( + "Cargo.toml", + r#" + [package] + name = "noop" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + + p.cargo("build").run(); + p.cargo("build").run(); +} + +#[cargo_test] +fn impl_and_derive() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.transmogrify] + path = "../transmogrify" + "#, + ) + .file( + 
"src/main.rs", + r#" + #[macro_use] + extern crate transmogrify; + + trait ImplByTransmogrify { + fn impl_by_transmogrify(&self) -> bool; + } + + #[derive(Transmogrify, Debug)] + struct X { success: bool } + + fn main() { + let x = X::new(); + assert!(x.impl_by_transmogrify()); + println!("{:?}", x); + } + "#, + ) + .build(); + let _transmogrify = project() + .at("transmogrify") + .file( + "Cargo.toml", + r#" + [package] + name = "transmogrify" + version = "0.0.1" + authors = [] + + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Transmogrify)] + #[doc(hidden)] + pub fn transmogrify(input: TokenStream) -> TokenStream { + " + impl X { + fn new() -> Self { + X { success: true } + } + } + + impl ImplByTransmogrify for X { + fn impl_by_transmogrify(&self) -> bool { + true + } + } + ".parse().unwrap() + } + "#, + ) + .build(); + + p.cargo("build").run(); + p.cargo("run").with_stdout("X { success: true }").run(); +} + +#[cargo_test] +fn plugin_and_proc_macro() { + if !is_nightly() { + // plugins are unstable + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + plugin = true + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" + #![feature(plugin_registrar, rustc_private)] + #![feature(proc_macro, proc_macro_lib)] + + extern crate rustc_plugin; + use rustc_plugin::Registry; + + extern crate proc_macro; + use proc_macro::TokenStream; + + #[plugin_registrar] + pub fn plugin_registrar(reg: &mut Registry) {} + + #[proc_macro_derive(Questionable)] + pub fn questionable(input: TokenStream) -> TokenStream { + input + } + "#, + ) + .build(); + + let msg = " `lib.plugin` and `lib.proc-macro` cannot both be `true`"; + p.cargo("build") + .with_status(101) + .with_stderr_contains(msg) + .run(); +} + +#[cargo_test] +fn proc_macro_doctest() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + [lib] + proc-macro = true + "#, + ) + .file( + "src/lib.rs", + r#" +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +/// ``` +/// assert!(true); +/// ``` +#[proc_macro_derive(Bar)] +pub fn derive(_input: TokenStream) -> TokenStream { + "".parse().unwrap() +} + +#[test] +fn a() { + assert!(true); +} +"#, + ) + .build(); + + foo.cargo("test") + .with_stdout_contains("test a ... ok") + .with_stdout_contains_n("test [..] ... ok", 2) + .run(); +} + +#[cargo_test] +fn proc_macro_crate_type() { + // Verify that `crate-type = ["proc-macro"]` is the same as `proc-macro = true` + // and that everything, including rustdoc, works correctly. + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [dependencies] + pm = { path = "pm" } + "#, + ) + .file( + "src/lib.rs", + r#" + //! ``` + //! use foo::THING; + //! assert_eq!(THING, 123); + //! 
``` + #[macro_use] + extern crate pm; + #[derive(MkItem)] + pub struct S; + #[cfg(test)] + mod tests { + use super::THING; + #[test] + fn it_works() { + assert_eq!(THING, 123); + } + } + "#, + ) + .file( + "pm/Cargo.toml", + r#" + [package] + name = "pm" + version = "0.1.0" + [lib] + crate-type = ["proc-macro"] + "#, + ) + .file( + "pm/src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(MkItem)] + pub fn mk_item(_input: TokenStream) -> TokenStream { + "pub const THING: i32 = 123;".parse().unwrap() + } + "#, + ) + .build(); + + foo.cargo("test") + .with_stdout_contains("test tests::it_works ... ok") + .with_stdout_contains_n("test [..] ... ok", 2) + .run(); +} + +#[cargo_test] +fn proc_macro_crate_type_warning() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + crate-type = ["proc-macro"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + foo.cargo("build") + .with_stderr_contains( + "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`") + .run(); +} + +#[cargo_test] +fn proc_macro_crate_type_warning_plugin() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + crate-type = ["proc-macro"] + plugin = true + "#, + ) + .file("src/lib.rs", "") + .build(); + + foo.cargo("build") + .with_stderr_contains( + "[WARNING] proc-macro library `foo` should not specify `plugin = true`") + .with_stderr_contains( + "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`") + .run(); +} + +#[cargo_test] +fn proc_macro_crate_type_multiple() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + [lib] + crate-type = ["proc-macro", "rlib"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + foo.cargo("build") + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` + +Caused by: + cannot mix `proc-macro` crate type with others +", + ) + .with_status(101) + .run(); +} diff --git a/tests/testsuite/profile_config.rs b/tests/testsuite/profile_config.rs new file mode 100644 index 00000000000..6513177737b --- /dev/null +++ b/tests/testsuite/profile_config.rs @@ -0,0 +1,391 @@ +use crate::support::{basic_lib_manifest, is_nightly, paths, project}; + +#[cargo_test] +fn profile_config_gated() { + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.dev] + debug = 1 + "#, + ) + .build(); + + p.cargo("build -v") + .with_stderr_contains( + "\ +[WARNING] profiles in config files require `-Z config-profile` command-line option +", + ) + .with_stderr_contains("[..]-C debuginfo=2[..]") + .run(); +} + +#[cargo_test] +fn profile_config_validate_warnings() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.test] + opt-level = 3 + + [profile.asdf] + opt-level = 3 + + [profile.dev] + bad-key = true + + [profile.dev.build-override] + bad-key-bo = true + + [profile.dev.overrides.bar] + bad-key-bar = true + "#, + ) + .build(); + + p.cargo("build -Z config-profile") + .masquerade_as_nightly_cargo() + .with_stderr_unordered( + "\ +[WARNING] unused key `profile.asdf` in config file `[..].cargo/config` +[WARNING] unused key `profile.test` in config file 
`[..].cargo/config` +[WARNING] unused key `profile.dev.bad-key` in config file `[..].cargo/config` +[WARNING] unused key `profile.dev.overrides.bar.bad-key-bar` in config file `[..].cargo/config` +[WARNING] unused key `profile.dev.build-override.bad-key-bo` in config file `[..].cargo/config` +[COMPILING] foo [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_config_error_paths() { + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.dev] + opt-level = 3 + "#, + ) + .file( + paths::home().join(".cargo/config"), + r#" + [profile.dev] + rpath = "foo" + "#, + ) + .build(); + + p.cargo("build -Z config-profile") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[CWD]/Cargo.toml` + +Caused by: + error in [..].cargo/config: `profile.dev.rpath` expected true/false, but found a string +", + ) + .run(); +} + +#[cargo_test] +fn profile_config_validate_errors() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.dev.overrides.foo] + panic = "abort" + "#, + ) + .build(); + + p.cargo("build -Z config-profile") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] config profile `profile.dev` is not valid + +Caused by: + `panic` may not be specified in a profile override. +", + ) + .run(); +} + +#[cargo_test] +fn profile_config_syntax_errors() { + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.dev] + codegen-units = "foo" + "#, + ) + .build(); + + p.cargo("build -Z config-profile") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at [..] + +Caused by: + error in [..].cargo/config: `profile.dev.codegen-units` expected an integer, but found a string +", + ) + .run(); +} + +#[cargo_test] +fn profile_config_override_spec_multiple() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + ".cargo/config", + r#" + [profile.dev.overrides.bar] + opt-level = 3 + + [profile.dev.overrides."bar:0.5.0"] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "bar" + version = "0.5.0" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + // Unfortunately this doesn't tell you which file, hopefully it's not too + // much of a problem. + p.cargo("build -v -Z config-profile") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +[ERROR] multiple profile overrides in profile `dev` match package `bar v0.5.0 ([..])` +found profile override specs: bar, bar:0.5.0", + ) + .run(); +} + +#[cargo_test] +fn profile_config_all_options() { + if !is_nightly() { + // May be removed once 1.34 is stable (added support for incremental-LTO). + return; + } + + // Ensure all profile options are supported. 
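+ // Roughly, each key maps onto a rustc codegen flag: `opt-level = 1` ->
+ // `-C opt-level=1`, `debug = true` -> `-C debuginfo=2`, `overflow-checks =
+ // false` -> `-C overflow-checks=off`, and so on. The expected `rustc`
+ // invocation asserted below spells out the full mapping.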
+ let p = project() + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [profile.release] + opt-level = 1 + debug = true + debug-assertions = true + overflow-checks = false + rpath = true + lto = true + codegen-units = 2 + panic = "abort" + incremental = true + "#, + ) + .build(); + + p.cargo("build --release -v -Z config-profile") + .masquerade_as_nightly_cargo() + .env_remove("CARGO_INCREMENTAL") + .with_stderr( + "\ +[COMPILING] foo [..] +[RUNNING] `rustc --crate-name foo [..] \ + -C opt-level=1 \ + -C panic=abort \ + -C lto \ + -C codegen-units=2 \ + -C debuginfo=2 \ + -C debug-assertions=on \ + -C overflow-checks=off [..]\ + -C rpath [..]\ + -C incremental=[..] +[FINISHED] release [optimized + debuginfo] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_config_override_precedence() { + // Config values take precedence over manifest values. + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = {path = "bar"} + + [profile.dev] + codegen-units = 2 + + [profile.dev.overrides.bar] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "bar" + version = "0.0.1" + "#, + ) + .file("bar/src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.dev.overrides.bar] + opt-level = 2 + "#, + ) + .build(); + + p.cargo("build -v -Z config-profile") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[COMPILING] bar [..] +[RUNNING] `rustc --crate-name bar [..] -C opt-level=2 -C codegen-units=2 [..] +[COMPILING] foo [..] +[RUNNING] `rustc --crate-name foo [..]-C codegen-units=2 [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn profile_config_no_warn_unknown_override() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.dev.overrides.bar] + codegen-units = 4 + "#, + ) + .build(); + + p.cargo("build -Z config-profile") + .masquerade_as_nightly_cargo() + .with_stderr_does_not_contain("[..]warning[..]") + .run(); +} + +#[cargo_test] +fn profile_config_mixed_types() { + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [profile.dev] + opt-level = 3 + "#, + ) + .file( + paths::home().join(".cargo/config"), + r#" + [profile.dev] + opt-level = 's' + "#, + ) + .build(); + + p.cargo("build -v -Z config-profile") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[..]-C opt-level=3 [..]") + .run(); +} diff --git a/tests/testsuite/profile_overrides.rs b/tests/testsuite/profile_overrides.rs new file mode 100644 index 00000000000..8445460b571 --- /dev/null +++ b/tests/testsuite/profile_overrides.rs @@ -0,0 +1,509 @@ +use crate::support::registry::Package; +use crate::support::{basic_lib_manifest, basic_manifest, project}; + +#[cargo_test] +fn profile_override_gated() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.dev.build-override] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + feature `profile-overrides` is required + 
+consider adding `cargo-features = [\"profile-overrides\"]` to the manifest +", + ) + .run(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.dev.overrides."*"] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + feature `profile-overrides` is required + +consider adding `cargo-features = [\"profile-overrides\"]` to the manifest +", + ) + .run(); +} + +#[cargo_test] +fn profile_override_basic() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = {path = "bar"} + + [profile.dev] + opt-level = 1 + + [profile.dev.overrides.bar] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build -v") + .masquerade_as_nightly_cargo() + .with_stderr( + "[COMPILING] bar [..] +[RUNNING] `rustc --crate-name bar [..] -C opt-level=3 [..]` +[COMPILING] foo [..] +[RUNNING] `rustc --crate-name foo [..] -C opt-level=1 [..]` +[FINISHED] dev [optimized + debuginfo] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn profile_override_warnings() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = {path = "bar"} + + [profile.dev.overrides.bart] + opt-level = 3 + + [profile.dev.overrides.no-suggestion] + opt-level = 3 + + [profile.dev.overrides."bar:1.2.3"] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build").masquerade_as_nightly_cargo().with_stderr_contains( + "\ +[WARNING] version or URL in profile override spec `bar:1.2.3` does not match any of the packages: bar v0.5.0 ([..]) +[WARNING] profile override spec `bart` did not match any packages + +Did you mean `bar`? +[WARNING] profile override spec `no-suggestion` did not match any packages +[COMPILING] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_override_dev_release_only() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = {path = "bar"} + + [profile.test.overrides.bar] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains( + "\ +Caused by: + Profile overrides may only be specified for `dev` or `release` profile, not `test`. 
+", + ) + .run(); +} + +#[cargo_test] +fn profile_override_bad_settings() { + let bad_values = [ + ( + "panic = \"abort\"", + "`panic` may not be specified in a profile override.", + ), + ( + "lto = true", + "`lto` may not be specified in a profile override.", + ), + ( + "rpath = true", + "`rpath` may not be specified in a profile override.", + ), + ("overrides = {}", "Profile overrides cannot be nested."), + ]; + for &(snippet, expected) in bad_values.iter() { + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = {{path = "bar"}} + + [profile.dev.overrides.bar] + {} + "#, + snippet + ), + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains(format!("Caused by:\n {}", expected)) + .run(); + } +} + +#[cargo_test] +fn profile_override_hierarchy() { + // Test that the precedence rules are correct for different types. + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [workspace] + members = ["m1", "m2", "m3"] + + [profile.dev] + codegen-units = 1 + + [profile.dev.overrides.m2] + codegen-units = 2 + + [profile.dev.overrides."*"] + codegen-units = 3 + + [profile.dev.build-override] + codegen-units = 4 + "#, + ) + // m1 + .file( + "m1/Cargo.toml", + r#" + [package] + name = "m1" + version = "0.0.1" + + [dependencies] + m2 = { path = "../m2" } + dep = { path = "../../dep" } + "#, + ) + .file("m1/src/lib.rs", "extern crate m2; extern crate dep;") + .file("m1/build.rs", "fn main() {}") + // m2 + .file( + "m2/Cargo.toml", + r#" + [package] + name = "m2" + version = "0.0.1" + + [dependencies] + m3 = { path = "../m3" } + + [build-dependencies] + m3 = { path = "../m3" } + dep = { path = "../../dep" } + "#, + ) + .file("m2/src/lib.rs", "extern crate m3;") + .file( + "m2/build.rs", + "extern crate m3; extern crate dep; fn main() {}", + ) + // m3 + .file("m3/Cargo.toml", &basic_lib_manifest("m3")) + .file("m3/src/lib.rs", "") + .build(); + + // dep (outside of workspace) + let _dep = project() + .at("dep") + .file("Cargo.toml", &basic_lib_manifest("dep")) + .file("src/lib.rs", "") + .build(); + + // Profiles should be: + // m3: 4 (as build.rs dependency) + // m3: 1 (as [profile.dev] as workspace member) + // dep: 3 (as [profile.dev.overrides."*"] as non-workspace member) + // m1 build.rs: 4 (as [profile.dev.build-override]) + // m2 build.rs: 2 (as [profile.dev.overrides.m2]) + // m2: 2 (as [profile.dev.overrides.m2]) + // m1: 1 (as [profile.dev]) + + p.cargo("build -v").masquerade_as_nightly_cargo().with_stderr_unordered("\ +[COMPILING] m3 [..] +[COMPILING] dep [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=4 [..] +[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=3 [..] +[RUNNING] `rustc --crate-name m3 m3/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[RUNNING] `rustc --crate-name build_script_build m1/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=4 [..] +[COMPILING] m2 [..] +[RUNNING] `rustc --crate-name build_script_build m2/build.rs --color never --crate-type bin --emit=[..]link -C codegen-units=2 [..] 
+[RUNNING] `[..]/m1-[..]/build-script-build` +[RUNNING] `[..]/m2-[..]/build-script-build` +[RUNNING] `rustc --crate-name m2 m2/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=2 [..] +[COMPILING] m1 [..] +[RUNNING] `rustc --crate-name m1 m1/src/lib.rs --color never --crate-type lib --emit=[..]link -C codegen-units=1 [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_override_spec_multiple() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { path = "bar" } + + [profile.dev.overrides.bar] + opt-level = 3 + + [profile.dev.overrides."bar:0.5.0"] + opt-level = 3 + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build -v") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr_contains( + "\ +[ERROR] multiple profile overrides in profile `dev` match package `bar v0.5.0 ([..])` +found profile override specs: bar, bar:0.5.0", + ) + .run(); +} + +#[cargo_test] +fn profile_override_spec() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + + [workspace] + members = ["m1", "m2"] + + [profile.dev.overrides."dep:1.0.0"] + codegen-units = 1 + + [profile.dev.overrides."dep:2.0.0"] + codegen-units = 2 + "#, + ) + // m1 + .file( + "m1/Cargo.toml", + r#" + [package] + name = "m1" + version = "0.0.1" + + [dependencies] + dep = { path = "../../dep1" } + "#, + ) + .file("m1/src/lib.rs", "extern crate dep;") + // m2 + .file( + "m2/Cargo.toml", + r#" + [package] + name = "m2" + version = "0.0.1" + + [dependencies] + dep = {path = "../../dep2" } + "#, + ) + .file("m2/src/lib.rs", "extern crate dep;") + .build(); + + project() + .at("dep1") + .file("Cargo.toml", &basic_manifest("dep", "1.0.0")) + .file("src/lib.rs", "") + .build(); + + project() + .at("dep2") + .file("Cargo.toml", &basic_manifest("dep", "2.0.0")) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .masquerade_as_nightly_cargo() + .with_stderr_contains("[RUNNING] `rustc [..]dep1/src/lib.rs [..] -C codegen-units=1 [..]") + .with_stderr_contains("[RUNNING] `rustc [..]dep2/src/lib.rs [..] -C codegen-units=2 [..]") + .run(); +} + +#[cargo_test] +fn override_proc_macro() { + Package::new("shared", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["profile-overrides"] + [package] + name = "foo" + version = "0.1.0" + edition = "2018" + + [dependencies] + shared = "1.0" + pm = {path = "pm"} + + [profile.dev.build-override] + codegen-units = 4 + "#, + ) + .file("src/lib.rs", r#"pm::eat!{}"#) + .file( + "pm/Cargo.toml", + r#" + [package] + name = "pm" + version = "0.1.0" + + [lib] + proc-macro = true + + [dependencies] + shared = "1.0" + "#, + ) + .file( + "pm/src/lib.rs", + r#" + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro] + pub fn eat(_item: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + + p.cargo("build -v") + .masquerade_as_nightly_cargo() + // Shared built for the proc-macro. + .with_stderr_contains("[RUNNING] `rustc [..]--crate-name shared [..]-C codegen-units=4[..]") + // Shared built for the library. 
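+ // That is, there must also be a `rustc ... --crate-name shared` invocation
+ // *without* `-C codegen-units`, since the build-override applies only to
+ // the copy of `shared` compiled for the proc-macro, not the copy compiled
+ // for the library itself.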
+ .with_stderr_line_without( + &["[RUNNING] `rustc --crate-name shared"], + &["-C codegen-units"], + ) + .with_stderr_contains("[RUNNING] `rustc [..]--crate-name pm [..]-C codegen-units=4[..]") + .with_stderr_line_without( + &["[RUNNING] `rustc [..]--crate-name foo"], + &["-C codegen-units"], + ) + .run(); +} diff --git a/tests/testsuite/profile_targets.rs b/tests/testsuite/profile_targets.rs new file mode 100644 index 00000000000..7a85f481401 --- /dev/null +++ b/tests/testsuite/profile_targets.rs @@ -0,0 +1,648 @@ +use crate::support::{basic_manifest, project, Project}; + +// These tests try to exercise exactly which profiles are selected for every target. + +fn all_target_project() -> Project { + // This abuses the `codegen-units` setting so that we can verify exactly + // which profile is used for each compiler invocation. + project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { path = "bar" } + + [build-dependencies] + bdep = { path = "bdep" } + + [profile.dev] + codegen-units = 1 + panic = "abort" + [profile.release] + codegen-units = 2 + panic = "abort" + [profile.test] + codegen-units = 3 + [profile.bench] + codegen-units = 4 + "#, + ) + .file("src/lib.rs", "extern crate bar;") + .file("src/main.rs", "extern crate foo; fn main() {}") + .file("examples/ex1.rs", "extern crate foo; fn main() {}") + .file("tests/test1.rs", "extern crate foo;") + .file("benches/bench1.rs", "extern crate foo;") + .file( + "build.rs", + r#" + extern crate bdep; + fn main() { + eprintln!("foo custom build PROFILE={} DEBUG={} OPT_LEVEL={}", + std::env::var("PROFILE").unwrap(), + std::env::var("DEBUG").unwrap(), + std::env::var("OPT_LEVEL").unwrap(), + ); + } + "#, + ) + // `bar` package. + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + // `bdep` package. + .file( + "bdep/Cargo.toml", + r#" + [package] + name = "bdep" + version = "0.0.1" + + [dependencies] + bar = { path = "../bar" } + "#, + ) + .file("bdep/src/lib.rs", "extern crate bar;") + .build() +} + +#[cargo_test] +fn profile_selection_build() { + let p = all_target_project(); + + // `build` + // NOTES: + // - bdep `panic` is not set because it thinks `build.rs` is a plugin. + // - build_script_build is built without panic because it thinks `build.rs` is a plugin. + p.cargo("build -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[FINISHED] dev [unoptimized + debuginfo] [..] 
+").run(); + p.cargo("build -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_build_release() { + let p = all_target_project(); + + // `build --release` + p.cargo("build --release -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[FINISHED] release [optimized] [..] +").run(); + p.cargo("build --release -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] release [optimized] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_build_all_targets() { + let p = all_target_project(); + // `build` + // NOTES: + // - bdep `panic` is not set because it thinks `build.rs` is a plugin. + // - build_script_build is built without panic because it thinks + // `build.rs` is a plugin. + // - Benchmark dependencies are compiled in `dev` mode, which may be + // surprising. See issue rust-lang/cargo#4929. + // + // - Dependency profiles: + // Pkg Target Profile Reason + // --- ------ ------- ------ + // bar lib dev For foo-bin + // bar lib dev-panic For tests/benches and bdep + // bdep lib dev-panic For foo build.rs + // foo custom dev-panic + // + // - `foo` target list is: + // Target Profile Mode + // ------ ------- ---- + // lib dev+panic build (a normal lib target) + // lib dev-panic build (used by tests/benches) + // lib test test + // test test test + // bench test test + // bin test test + // bin dev build + // example dev build + p.cargo("build --all-targets -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]` +[RUNNING] `[..] 
rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C codegen-units=3 -C debuginfo=2 --test [..]` +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..]` +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C codegen-units=3 -C debuginfo=2 --test [..]` +[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C codegen-units=3 -C debuginfo=2 --test [..]` +[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C codegen-units=3 -C debuginfo=2 --test [..]` +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]` +[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..]` +[FINISHED] dev [unoptimized + debuginfo] [..] +").run(); + p.cargo("build -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_build_all_targets_release() { + let p = all_target_project(); + // `build --all-targets --release` + // NOTES: + // - bdep `panic` is not set because it thinks `build.rs` is a plugin. + // - bar compiled twice. It tries with and without panic, but the "is a + // plugin" logic is forcing it to be cleared. + // - build_script_build is built without panic because it thinks + // `build.rs` is a plugin. + // - build_script_build is being run two times. Once for the `dev` and + // `test` targets, once for the `bench` targets. + // TODO: "PROFILE" says debug both times, though! + // + // - Dependency profiles: + // Pkg Target Profile Reason + // --- ------ ------- ------ + // bar lib release For foo-bin + // bar lib release-panic For tests/benches and bdep + // bdep lib release-panic For foo build.rs + // foo custom release-panic + // + // - `foo` target list is: + // Target Profile Mode + // ------ ------- ---- + // lib release+panic build (a normal lib target) + // lib release-panic build (used by tests/benches) + // lib bench test (bench/test de-duped) + // test bench test + // bench bench test + // bin bench test (bench/test de-duped) + // bin release build + // example release build + p.cargo("build --all-targets --release -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]` +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..]` +[RUNNING] `[..] 
rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..]` +[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..]` +[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..]` +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..]` +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]` +[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..]` +[FINISHED] release [optimized] [..] +").run(); + p.cargo("build --all-targets --release -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] release [optimized] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_test() { + let p = all_target_project(); + // `test` + // NOTES: + // - Dependency profiles: + // Pkg Target Profile Reason + // --- ------ ------- ------ + // bar lib dev For foo-bin + // bar lib dev-panic For tests/benches and bdep + // bdep lib dev-panic For foo build.rs + // foo custom dev-panic + // + // - `foo` target list is: + // Target Profile Mode + // ------ ------- ---- + // lib dev-panic build (for tests) + // lib dev build (for bins) + // lib test test + // test test test + // example dev-panic build + // bin test test + // bin dev build + // + p.cargo("test -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C codegen-units=3 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C codegen-units=3 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C codegen-units=3 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/test1-[..]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..] 
+").run(); + p.cargo("test -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/test1-[..]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_test_release() { + let p = all_target_project(); + // `test --release` + // NOTES: + // - Dependency profiles: + // Pkg Target Profile Reason + // --- ------ ------- ------ + // bar lib release For foo-bin + // bar lib release-panic For tests/benches and bdep + // bdep lib release-panic For foo build.rs + // foo custom release-panic + // + // - `foo` target list is: + // Target Profile Mode + // ------ ------- ---- + // lib release-panic build (for tests) + // lib release build (for bins) + // lib bench test + // test bench test + // example release-panic build + // bin bench test + // bin release build + // + p.cargo("test --release -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..] +[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..] +[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[FINISHED] release [optimized] [..] +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/test1-[..]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]` +").run(); + p.cargo("test --release -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] release [optimized] [..] +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/foo-[..]` +[RUNNING] `[..]/deps/test1-[..]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_bench() { + let p = all_target_project(); + + // `bench` + // NOTES: + // - Dependency profiles: + // Pkg Target Profile Reason + // --- ------ ------- ------ + // bar lib release For foo-bin + // bar lib release-panic For tests/benches and bdep + // bdep lib release-panic For foo build.rs + // foo custom release-panic + // + // - `foo` target list is: + // Target Profile Mode + // ------ ------- ---- + // lib release-panic build (for benches) + // lib release build (for bins) + // lib bench test(bench) + // bench bench test(bench) + // bin bench test(bench) + // bin release build + // + p.cargo("bench -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..]target/release/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..] +[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3 -C codegen-units=4 --test [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[FINISHED] release [optimized] [..] +[RUNNING] `[..]/deps/foo-[..] --bench` +[RUNNING] `[..]/deps/foo-[..] --bench` +[RUNNING] `[..]/deps/bench1-[..] --bench` +").run(); + p.cargo("bench -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] release [optimized] [..] +[RUNNING] `[..]/deps/foo-[..] --bench` +[RUNNING] `[..]/deps/foo-[..] --bench` +[RUNNING] `[..]/deps/bench1-[..] --bench` +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_check_all_targets() { + let p = all_target_project(); + // `check` + // NOTES: + // - Dependency profiles: + // Pkg Target Profile Action Reason + // --- ------ ------- ------ ------ + // bar lib dev* link For bdep + // bar lib dev-panic metadata For tests/benches + // bar lib dev metadata For lib/bins + // bdep lib dev* link For foo build.rs + // foo custom dev* link For build.rs + // + // `*` = wants panic, but it is cleared when args are built. 
+ // + // - foo target list is: + // Target Profile Mode + // ------ ------- ---- + // lib dev check + // lib dev-panic check (for tests/benches) + // lib dev-panic check-test (checking lib as a unittest) + // example dev check + // test dev-panic check-test + // bench dev-panic check-test + // bin dev check + // bin dev-panic check-test (checking bin as a unittest) + // + p.cargo("check --all-targets -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] bdep[..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +").run(); + // Starting with Rust 1.27, rustc emits `rmeta` files for bins, so + // everything should be completely fresh. Previously, bins were being + // rechecked. + // See PR rust-lang/rust#49289 and issue rust-lang/cargo#3624. + p.cargo("check --all-targets -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_check_all_targets_release() { + let p = all_target_project(); + // `check --release` + // See issue rust-lang/cargo#5218. + // This is a pretty straightforward variant of + // `profile_selection_check_all_targets` that uses `release` instead of + // `dev` for all targets. + p.cargo("check --all-targets --release -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C codegen-units=2 [..] 
+[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[COMPILING] bdep[..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..]target/release/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata -C opt-level=3 -C codegen-units=2 --test [..] +[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata -C opt-level=3 -C codegen-units=2 --test [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata -C opt-level=3 -C codegen-units=2 --test [..] +[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata -C opt-level=3 -C codegen-units=2 --test [..] +[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort -C codegen-units=2 [..] +[FINISHED] release [optimized] [..] +").run(); + + p.cargo("check --all-targets --release -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] release [optimized] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_check_all_targets_test() { + let p = all_target_project(); + // `check --profile=test` + // NOTES: + // - This doesn't actually use the "test" profile. Everything uses "dev". + // It should probably use "test", although it probably doesn't really matter. + // - Dependency profiles: + // Pkg Target Profile Action Reason + // --- ------ ------- ------ ------ + // bar lib dev* link For bdep + // bar lib dev-panic metadata For tests/benches + // bdep lib dev* link For foo build.rs + // foo custom dev* link For build.rs + // + // `*` = wants panic, but it is cleared when args are built. + // + // - foo target list is: + // Target Profile Mode + // ------ ------- ---- + // lib dev-panic check-test (for tests/benches) + // lib dev-panic check-test (checking lib as a unittest) + // example dev-panic check-test + // test dev-panic check-test + // bench dev-panic check-test + // bin dev-panic check-test + // + p.cargo("check --all-targets --profile=test -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] bdep[..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] 
+[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--emit=[..]metadata -C codegen-units=1 -C debuginfo=2 --test [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +").run(); + + p.cargo("check --all-targets --profile=test -vv") + .with_stderr_unordered( + "\ +[FRESH] bar [..] +[FRESH] bdep [..] +[FRESH] foo [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +", + ) + .run(); +} + +#[cargo_test] +fn profile_selection_doc() { + let p = all_target_project(); + // `doc` + // NOTES: + // - Dependency profiles: + // Pkg Target Profile Action Reason + // --- ------ ------- ------ ------ + // bar lib dev* link For bdep + // bar lib dev metadata For rustdoc + // bdep lib dev* link For foo build.rs + // foo custom dev* link For build.rs + // + // `*` = wants panic, but it is cleared when args are built. + p.cargo("doc -vv").with_stderr_unordered("\ +[COMPILING] bar [..] +[DOCUMENTING] bar [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `rustdoc --crate-name bar bar/src/lib.rs [..] +[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] bdep [..] +[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[COMPILING] foo [..] +[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link -C codegen-units=1 -C debuginfo=2 [..] +[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` +[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 +[DOCUMENTING] foo [..] +[RUNNING] `rustdoc --crate-name foo src/lib.rs [..] +[FINISHED] dev [unoptimized + debuginfo] [..] +").run(); +} diff --git a/tests/testsuite/profiles.rs b/tests/testsuite/profiles.rs new file mode 100644 index 00000000000..ad7c5c63b2c --- /dev/null +++ b/tests/testsuite/profiles.rs @@ -0,0 +1,439 @@ +use std::env; + +use crate::support::project; + +#[cargo_test] +fn profile_overrides() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = 1 + debug = false + rpath = true + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] test v0.0.0 ([CWD]) +[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \ + --emit=[..]link \ + -C opt-level=1 \ + -C debug-assertions=on \ + -C metadata=[..] \ + -C rpath \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [optimized] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn opt_level_override_0() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = 0 + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] test v0.0.0 ([CWD]) +[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \ + --emit=[..]link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] [..] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn debug_override_1() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + debug = 1 + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] test v0.0.0 ([CWD]) +[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \ + --emit=[..]link \ + -C debuginfo=1 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] [..] target(s) in [..] +", + ) + .run(); +} + +fn check_opt_level_override(profile_level: &str, rustc_level: &str) { + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.dev] + opt-level = {level} + "#, + level = profile_level + ), + ) + .file("src/lib.rs", "") + .build(); + p.cargo("build -v") + .with_stderr(&format!( + "\ +[COMPILING] test v0.0.0 ([CWD]) +[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \ + --emit=[..]link \ + -C opt-level={level} \ + -C debuginfo=2 \ + -C debug-assertions=on \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] [..] target(s) in [..] +", + level = rustc_level + )) + .run(); +} + +#[cargo_test] +fn opt_level_overrides() { + for &(profile_level, rustc_level) in &[ + ("1", "1"), + ("2", "2"), + ("3", "3"), + ("\"s\"", "s"), + ("\"z\"", "z"), + ] { + check_opt_level_override(profile_level, rustc_level) + } +} + +#[cargo_test] +fn top_level_overrides_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + + name = "test" + version = "0.0.0" + authors = [] + + [profile.release] + opt-level = 1 + debug = true + + [dependencies.foo] + path = "foo" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + + name = "foo" + version = "0.0.0" + authors = [] + + [profile.release] + opt-level = 0 + debug = false + + [lib] + name = "foo" + crate_type = ["dylib", "rlib"] + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + p.cargo("build -v --release") + .with_stderr(&format!( + "\ +[COMPILING] foo v0.0.0 ([CWD]/foo) +[RUNNING] `rustc --crate-name foo foo/src/lib.rs --color never \ + --crate-type dylib --crate-type rlib \ + --emit=[..]link \ + -C prefer-dynamic \ + -C opt-level=1 \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [CWD]/target/release/deps \ + -L dependency=[CWD]/target/release/deps` +[COMPILING] test v0.0.0 ([CWD]) +[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \ + --emit=[..]link \ + -C opt-level=1 \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/release/deps \ + --extern foo=[CWD]/target/release/deps/\ + {prefix}foo[..]{suffix} \ + --extern foo=[CWD]/target/release/deps/libfoo.rlib` +[FINISHED] release [optimized + debuginfo] target(s) in [..] 
+", + prefix = env::consts::DLL_PREFIX, + suffix = env::consts::DLL_SUFFIX + )) + .run(); +} + +#[cargo_test] +fn profile_in_non_root_manifest_triggers_a_warning() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + + [profile.dev] + debug = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + + [profile.dev] + opt-level = 1 + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .cwd("bar") + .with_stderr( + "\ +[WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root: +package: [..] +workspace: [..] +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn profile_in_virtual_manifest_works() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + + [profile.dev] + opt-level = 1 + debug = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v") + .cwd("bar") + .with_stderr( + "\ +[COMPILING] bar v0.1.0 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [optimized] target(s) in [..]", + ) + .run(); +} + +#[cargo_test] +fn profile_panic_test_bench() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [profile.test] + panic = "abort" + + [profile.bench] + panic = "abort" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr_contains( + "\ +[WARNING] `panic` setting is ignored for `test` profile +[WARNING] `panic` setting is ignored for `bench` profile +", + ) + .run(); +} + +#[cargo_test] +fn profile_doc_deprecated() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [profile.doc] + opt-level = 0 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build") + .with_stderr_contains("[WARNING] profile `doc` is deprecated and has no effect") + .run(); +} + +#[cargo_test] +fn panic_unwind_does_not_build_twice() { + // Check for a bug where `lib` was built twice, once with panic set and + // once without. Since "unwind" is the default, they are the same and + // should only be built once. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [profile.dev] + panic = "unwind" + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/t1.rs", "") + .build(); + + p.cargo("test -v --tests --no-run") + .with_stderr_unordered( + "\ +[COMPILING] foo [..] +[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] +[RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..] +[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin [..] +[RUNNING] `rustc --crate-name foo src/main.rs [..] --test [..] +[RUNNING] `rustc --crate-name t1 tests/t1.rs [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn debug_0_report() { + // The finished line handles 0 correctly. 
+ let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [profile.dev] + debug = 0 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.1.0 [..] +[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C debuginfo=0 [..] +[FINISHED] dev [unoptimized] target(s) in [..] +", + ) + .run(); +} diff --git a/tests/testsuite/pub_priv.rs b/tests/testsuite/pub_priv.rs new file mode 100644 index 00000000000..4a9f53ef2c9 --- /dev/null +++ b/tests/testsuite/pub_priv.rs @@ -0,0 +1,204 @@ +use crate::support::registry::Package; +use crate::support::{is_nightly, project}; + +#[cargo_test] +fn exported_priv_warning() { + if !is_nightly() { + return; + } + Package::new("priv_dep", "0.1.0") + .file("src/lib.rs", "pub struct FromPriv;") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["public-dependency"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + priv_dep = "0.1.0" + "#, + ) + .file( + "src/lib.rs", + " + extern crate priv_dep; + pub fn use_priv(_: priv_dep::FromPriv) {} + ", + ) + .build(); + + p.cargo("build --message-format=short") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] priv_dep v0.1.0 ([..]) +[COMPILING] priv_dep v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +src/lib.rs:3:13: warning: type `priv_dep::FromPriv` from private dependency 'priv_dep' in public interface +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +" + ) + .run() +} + +#[cargo_test] +fn exported_pub_dep() { + if !is_nightly() { + return; + } + Package::new("pub_dep", "0.1.0") + .file("src/lib.rs", "pub struct FromPub;") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["public-dependency"] + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + pub_dep = {version = "0.1.0", public = true} + "#, + ) + .file( + "src/lib.rs", + " + extern crate pub_dep; + pub fn use_pub(_: pub_dep::FromPub) {} + ", + ) + .build(); + + p.cargo("build --message-format=short") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] pub_dep v0.1.0 ([..]) +[COMPILING] pub_dep v0.1.0 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run() +} + +#[cargo_test] +pub fn requires_nightly_cargo() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["public-dependency"] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build --message-format=short") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + the cargo feature `public-dependency` requires a nightly version of Cargo, but this is the `stable` channel +See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information about Rust release channels. 
+" + ) + .run() +} + +#[cargo_test] +fn requires_feature() { + Package::new("pub_dep", "0.1.0") + .file("src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + pub_dep = { version = "0.1.0", public = true } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build --message-format=short") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + feature `public-dependency` is required + +consider adding `cargo-features = [\"public-dependency\"]` to the manifest +", + ) + .run() +} + +#[cargo_test] +fn pub_dev_dependency() { + Package::new("pub_dep", "0.1.0") + .file("src/lib.rs", "pub struct FromPub;") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["public-dependency"] + + [package] + name = "foo" + version = "0.0.1" + + [dev-dependencies] + pub_dep = {version = "0.1.0", public = true} + "#, + ) + .file( + "src/lib.rs", + " + extern crate pub_dep; + pub fn use_pub(_: pub_dep::FromPub) {} + ", + ) + .build(); + + p.cargo("build --message-format=short") + .masquerade_as_nightly_cargo() + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + 'public' specifier can only be used on regular dependencies, not Development dependencies +", + ) + .run() +} diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs new file mode 100644 index 00000000000..15a5a9d1204 --- /dev/null +++ b/tests/testsuite/publish.rs @@ -0,0 +1,1024 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use crate::support::git::repo; +use crate::support::paths; +use crate::support::registry::{self, registry_path, registry_url, Package}; +use crate::support::{basic_manifest, project, publish}; + +const CLEAN_FOO_JSON: &str = r#" + { + "authors": [], + "badges": {}, + "categories": [], + "deps": [], + "description": "foo", + "documentation": "foo", + "features": {}, + "homepage": "foo", + "keywords": [], + "license": "MIT", + "license_file": null, + "links": null, + "name": "foo", + "readme": null, + "readme_file": null, + "repository": "foo", + "vers": "0.0.1" + } +"#; + +fn validate_upload_foo() { + publish::validate_upload( + r#" + { + "authors": [], + "badges": {}, + "categories": [], + "deps": [], + "description": "foo", + "documentation": null, + "features": {}, + "homepage": null, + "keywords": [], + "license": "MIT", + "license_file": null, + "links": null, + "name": "foo", + "readme": null, + "readme_file": null, + "repository": null, + "vers": "0.0.1" + } + "#, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + ); +} + +fn validate_upload_foo_clean() { + publish::validate_upload( + CLEAN_FOO_JSON, + "foo-0.0.1.crate", + &[ + "Cargo.lock", + "Cargo.toml", + "Cargo.toml.orig", + "src/main.rs", + ".cargo_vcs_info.json", + ], + ); +} + +#[cargo_test] +fn simple() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --no-verify --index") + .arg(registry_url().to_string()) + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ([CWD]) +[UPLOADING] foo v0.0.1 ([CWD]) +", + reg = registry::registry_path().to_str().unwrap() + )) + .run(); + + validate_upload_foo(); +} + +#[cargo_test] +fn old_token_location() { + // Check that the `token` key works at the root instead of under a + // `[registry]` table. + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + let credentials = paths::home().join(".cargo/credentials"); + fs::remove_file(&credentials).unwrap(); + + // Verify can't publish without a token. + p.cargo("publish --no-verify --index") + .arg(registry_url().to_string()) + .with_status(101) + .with_stderr_contains("[ERROR] no upload token found, please run `cargo login`") + .run(); + + File::create(&credentials) + .unwrap() + .write_all(br#"token = "api-token""#) + .unwrap(); + + p.cargo("publish --no-verify --index") + .arg(registry_url().to_string()) + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] foo v0.0.1 ([CWD]) +[UPLOADING] foo v0.0.1 ([CWD]) +", + reg = registry_path().to_str().unwrap() + )) + .run(); + + validate_upload_foo(); +} + +// TODO: Deprecated +// remove once it has been decided --host can be removed +#[cargo_test] +fn simple_with_host() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --no-verify --host") + .arg(registry_url().to_string()) + .with_stderr(&format!( + "\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] `{reg}` index +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] foo v0.0.1 ([CWD]) +[UPLOADING] foo v0.0.1 ([CWD]) +", + reg = registry_path().to_str().unwrap() + )) + .run(); + + validate_upload_foo(); +} + +// TODO: Deprecated +// remove once it has been decided --host can be removed +#[cargo_test] +fn simple_with_index_and_host() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --no-verify --index") + .arg(registry_url().to_string()) + .arg("--host") + .arg(registry_url().to_string()) + .with_stderr(&format!( + "\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] `{reg}` index +[WARNING] manifest has no documentation, [..] +See [..] 
+[PACKAGING] foo v0.0.1 ([CWD]) +[UPLOADING] foo v0.0.1 ([CWD]) +", + reg = registry_path().to_str().unwrap() + )) + .run(); + + validate_upload_foo(); +} + +#[cargo_test] +fn git_deps() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.foo] + git = "git://path/to/nowhere" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish -v --no-verify --index") + .arg(registry_url().to_string()) + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +[ERROR] crates cannot be published with dependencies sourced from \ +a repository\neither publish `foo` as its own crate and \ +specify a version as a dependency or pull it into this \ +repository and specify it with a path and version\n\ +(crate `foo` has repository path `git://path/to/nowhere`)\ +", + ) + .run(); +} + +#[cargo_test] +fn path_dependency_no_version() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [dependencies.bar] + path = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("publish --index") + .arg(registry_url().to_string()) + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +[ERROR] all path dependencies must have a version specified when publishing. +dependency `bar` does not specify a version +", + ) + .run(); +} + +#[cargo_test] +fn unpublishable_crate() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --index") + .arg(registry_url().to_string()) + .with_status(101) + .with_stderr( + "\ +[ERROR] `foo` cannot be published. +The registry `crates-io` is not listed in the `publish` value in Cargo.toml. 
+", + ) + .run(); +} + +#[cargo_test] +fn dont_publish_dirty() { + registry::init(); + let p = project().file("bar", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --index") + .arg(registry_url().to_string()) + .with_status(101) + .with_stderr( + "\ +[UPDATING] `[..]` index +error: 1 files in the working directory contain changes that were not yet \ +committed into git: + +bar + +to proceed despite this and include the uncommited changes, pass the `--allow-dirty` flag +", + ) + .run(); +} + +#[cargo_test] +fn publish_clean() { + registry::init(); + + let p = project().build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --index") + .arg(registry_url().to_string()) + .run(); + + validate_upload_foo_clean(); +} + +#[cargo_test] +fn publish_in_sub_repo() { + registry::init(); + + let p = project().no_manifest().file("baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish") + .cwd("bar") + .arg("--index") + .arg(registry_url().to_string()) + .run(); + + validate_upload_foo_clean(); +} + +#[cargo_test] +fn publish_when_ignored() { + registry::init(); + + let p = project().file("baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file(".gitignore", "baz") + .build(); + + p.cargo("publish --index") + .arg(registry_url().to_string()) + .run(); + + publish::validate_upload( + CLEAN_FOO_JSON, + "foo-0.0.1.crate", + &[ + "Cargo.lock", + "Cargo.toml", + "Cargo.toml.orig", + "src/main.rs", + ".gitignore", + ".cargo_vcs_info.json", + ], + ); +} + +#[cargo_test] +fn ignore_when_crate_ignored() { + registry::init(); + + let p = project().no_manifest().file("bar/baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .file(".gitignore", "bar") + .nocommit_file( + "bar/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .nocommit_file("bar/src/main.rs", "fn main() {}"); + p.cargo("publish") + .cwd("bar") + .arg("--index") + .arg(registry_url().to_string()) + .run(); + + publish::validate_upload( + CLEAN_FOO_JSON, + "foo-0.0.1.crate", + &[ + "Cargo.lock", + "Cargo.toml", + "Cargo.toml.orig", + "src/main.rs", + "baz", + ], + ); +} + +#[cargo_test] +fn new_crate_rejected() { + registry::init(); + + let p = project().file("baz", "").build(); + + let _ = repo(&paths::root().join("foo")) + .nocommit_file( + "Cargo.toml", + r#" + [project] + name = "foo" + 
version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .nocommit_file("src/main.rs", "fn main() {}"); + p.cargo("publish --index") + .arg(registry_url().to_string()) + .with_status(101) + .with_stderr_contains( + "[ERROR] 3 files in the working directory contain \ + changes that were not yet committed into git:", + ) + .run(); +} + +#[cargo_test] +fn dry_run() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --dry-run --index") + .arg(registry_url().to_string()) + .with_stderr( + "\ +[UPDATING] `[..]` index +[WARNING] manifest has no documentation, [..] +See [..] +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[UPLOADING] foo v0.0.1 ([CWD]) +[WARNING] aborting upload due to dry run +", + ) + .run(); + + // Ensure the API request wasn't actually made + assert!(registry::api_path().join("api/v1/crates").exists()); + assert!(!registry::api_path().join("api/v1/crates/new").exists()); +} + +#[cargo_test] +fn registry_not_in_publish_list() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = [ + "test" + ] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish") + .arg("--registry") + .arg("alternative") + .with_status(101) + .with_stderr( + "\ +[ERROR] `foo` cannot be published. +The registry `alternative` is not listed in the `publish` value in Cargo.toml. +", + ) + .run(); +} + +#[cargo_test] +fn publish_empty_list() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --registry alternative") + .with_status(101) + .with_stderr( + "\ +[ERROR] `foo` cannot be published. +The registry `alternative` is not listed in the `publish` value in Cargo.toml. +", + ) + .run(); +} + +#[cargo_test] +fn publish_allowed_registry() { + registry::init(); + + let p = project().build(); + + let _ = repo(&paths::root().join("foo")) + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + publish = ["alternative"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --registry alternative").run(); + + publish::validate_alt_upload( + CLEAN_FOO_JSON, + "foo-0.0.1.crate", + &[ + "Cargo.lock", + "Cargo.toml", + "Cargo.toml.orig", + "src/main.rs", + ".cargo_vcs_info.json", + ], + ); +} + +#[cargo_test] +fn block_publish_no_registry() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = [] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --registry alternative") + .with_status(101) + .with_stderr( + "\ +[ERROR] `foo` cannot be published. 
+The registry `alternative` is not listed in the `publish` value in Cargo.toml. +", + ) + .run(); +} + +#[cargo_test] +fn publish_with_crates_io_explicit() { + // Explicitly setting `crates-io` in the publish list. + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + publish = ["crates-io"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("publish --registry alternative") + .with_status(101) + .with_stderr( + "\ +[ERROR] `foo` cannot be published. +The registry `alternative` is not listed in the `publish` value in Cargo.toml. +", + ) + .run(); + + p.cargo("publish").run(); +} + +#[cargo_test] +fn publish_with_select_features() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [features] + required = [] + optional = [] + "#, + ) + .file( + "src/main.rs", + "#[cfg(not(feature = \"required\"))] + compile_error!(\"This crate requires `required` feature!\"); + fn main() {}", + ) + .build(); + + p.cargo("publish --features required --index") + .arg(registry_url().to_string()) + .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") + .run(); +} + +#[cargo_test] +fn publish_with_all_features() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [features] + required = [] + optional = [] + "#, + ) + .file( + "src/main.rs", + "#[cfg(not(feature = \"required\"))] + compile_error!(\"This crate requires `required` feature!\"); + fn main() {}", + ) + .build(); + + p.cargo("publish --all-features --index") + .arg(registry_url().to_string()) + .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") + .run(); +} + +#[cargo_test] +fn publish_with_no_default_features() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + + [features] + default = ["required"] + required = [] + "#, + ) + .file( + "src/main.rs", + "#[cfg(not(feature = \"required\"))] + compile_error!(\"This crate requires `required` feature!\"); + fn main() {}", + ) + .build(); + + p.cargo("publish --no-default-features --index") + .arg(registry_url().to_string()) + .with_stderr_contains("error: This crate requires `required` feature!") + .with_status(101) + .run(); +} + +#[cargo_test] +fn publish_with_patch() { + Package::new("bar", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + [dependencies] + bar = "1.0" + [patch.crates-io] + bar = { path = "bar" } + "#, + ) + .file( + "src/main.rs", + "extern crate bar; + fn main() { + bar::newfunc(); + }", + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) + .file("bar/src/lib.rs", "pub fn newfunc() {}") + .build(); + + // Check that it works with the patched crate. + p.cargo("build").run(); + + // Check that verify fails with patched crate which has new functionality. + p.cargo("publish --index") + .arg(registry_url().to_string()) + .with_stderr_contains("[..]newfunc[..]") + .with_status(101) + .run(); + + // Remove the usage of new functionality and try again. 
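+    // With the call to `bar::newfunc()` gone, the verify step can build
+    // against the registry's `bar 1.0.0` (the `[patch]` section is not
+    // included in the packaged manifest), so the upload succeeds.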
+ p.change_file("src/main.rs", "extern crate bar; pub fn main() {}"); + + p.cargo("publish --index") + .arg(registry_url().to_string()) + .run(); + + // Note, use of `registry` in the deps here is an artifact that this + // publishes to a fake, local registry that is pretending to be crates.io. + // Normal publishes would set it to null. + publish::validate_upload( + r#" + { + "authors": [], + "badges": {}, + "categories": [], + "deps": [ + { + "default_features": true, + "features": [], + "kind": "normal", + "name": "bar", + "optional": false, + "registry": "https://github.com/rust-lang/crates.io-index", + "target": null, + "version_req": "^1.0" + } + ], + "description": "foo", + "documentation": null, + "features": {}, + "homepage": null, + "keywords": [], + "license": "MIT", + "license_file": null, + "links": null, + "name": "foo", + "readme": null, + "readme_file": null, + "repository": null, + "vers": "0.0.1" + } + "#, + "foo-0.0.1.crate", + &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], + ); +} + +#[cargo_test] +fn publish_checks_for_token_before_verify() { + registry::init(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + let credentials = paths::home().join(".cargo/credentials"); + fs::remove_file(&credentials).unwrap(); + + // Assert upload token error before the package is verified + p.cargo("publish") + .with_status(101) + .with_stderr_contains("[ERROR] no upload token found, please run `cargo login`") + .with_stderr_does_not_contain("[VERIFYING] foo v0.0.1 ([CWD])") + .run(); + + // Assert package verified successfully on dry run + p.cargo("publish --dry-run") + .with_status(0) + .with_stderr_contains("[VERIFYING] foo v0.0.1 ([CWD])") + .run(); +} diff --git a/tests/testsuite/publish_lockfile.rs b/tests/testsuite/publish_lockfile.rs new file mode 100644 index 00000000000..201e7959cb8 --- /dev/null +++ b/tests/testsuite/publish_lockfile.rs @@ -0,0 +1,398 @@ +use std; +use std::fs::File; + +use crate::support::registry::Package; +use crate::support::{ + basic_manifest, cargo_process, git, paths, project, publish::validate_crate_contents, +}; + +fn pl_manifest(name: &str, version: &str, extra: &str) -> String { + format!( + r#" + [package] + name = "{}" + version = "{}" + authors = [] + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + + {} + "#, + name, version, extra + ) +} + +#[cargo_test] +fn deprecated() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["publish-lockfile"] + [package] + name = "foo" + version = "0.1.0" + publish-lockfile = true + license = "MIT" + description = "foo" + documentation = "foo" + homepage = "foo" + repository = "foo" + "#, + ) + .file("src/lib.rs", "") + .build(); + p.cargo("package") + .masquerade_as_nightly_cargo() + .with_stderr( + "\ +[PACKAGING] foo v0.1.0 ([..]) +[VERIFYING] foo v0.1.0 ([..]) +[WARNING] The `publish-lockfile` feature is deprecated and currently has no effect. \ + It may be removed in a future version. +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn package_lockfile() { + let p = project() + .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("package") + .with_stderr( + "\ +[PACKAGING] foo v0.0.1 ([CWD]) +[VERIFYING] foo v0.0.1 ([CWD]) +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); + p.cargo("package -l") + .with_stdout( + "\ +Cargo.lock +Cargo.toml +src/main.rs +", + ) + .run(); + p.cargo("package").with_stdout("").run(); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + validate_crate_contents( + f, + "foo-0.0.1.crate", + &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"], + &[], + ); +} + +#[cargo_test] +fn package_lockfile_git_repo() { + // Create a Git repository containing a minimal Rust project. + let g = git::repo(&paths::root().join("foo")) + .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) + .file("src/main.rs", "fn main() {}") + .build(); + cargo_process("package -l") + .cwd(g.root()) + .with_stdout( + "\ +.cargo_vcs_info.json +Cargo.lock +Cargo.toml +src/main.rs +", + ) + .run(); + cargo_process("package -v") + .cwd(g.root()) + .with_stderr( + "\ +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] Cargo.toml +[ARCHIVING] src/main.rs +[ARCHIVING] .cargo_vcs_info.json +[ARCHIVING] Cargo.lock +[VERIFYING] foo v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc --crate-name foo src/main.rs [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn no_lock_file_with_library() { + let p = project() + .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) + .file("src/lib.rs", "") + .build(); + + p.cargo("package").run(); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + validate_crate_contents( + f, + "foo-0.0.1.crate", + &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], + &[], + ); +} + +#[cargo_test] +fn lock_file_and_workspace() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + .file("foo/Cargo.toml", &pl_manifest("foo", "0.0.1", "")) + .file("foo/src/main.rs", "fn main() {}") + .build(); + + p.cargo("package").cwd("foo").run(); + + let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); + validate_crate_contents( + f, + "foo-0.0.1.crate", + &["Cargo.toml", "Cargo.toml.orig", "src/main.rs", "Cargo.lock"], + &[], + ); +} + +#[cargo_test] +fn note_resolve_changes() { + // `multi` has multiple sources (path and registry). + Package::new("mutli", "0.1.0").publish(); + // `updated` is always from registry, but should not change. + Package::new("updated", "1.0.0").publish(); + // `patched` is [patch]ed. + Package::new("patched", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + &pl_manifest( + "foo", + "0.0.1", + r#" + [dependencies] + mutli = { path = "mutli", version = "0.1" } + updated = "1.0" + patched = "1.0" + + [patch.crates-io] + patched = { path = "patched" } + "#, + ), + ) + .file("src/main.rs", "fn main() {}") + .file("mutli/Cargo.toml", &basic_manifest("mutli", "0.1.0")) + .file("mutli/src/lib.rs", "") + .file("patched/Cargo.toml", &basic_manifest("patched", "1.0.0")) + .file("patched/src/lib.rs", "") + .build(); + + p.cargo("generate-lockfile").run(); + + // Make sure this does not change or warn. 
+ Package::new("updated", "1.0.1").publish(); + + p.cargo("package --no-verify -v --allow-dirty") + .with_stderr_unordered( + "\ +[PACKAGING] foo v0.0.1 ([..]) +[ARCHIVING] Cargo.toml +[ARCHIVING] src/main.rs +[ARCHIVING] Cargo.lock +[UPDATING] `[..]` index +[NOTE] package `mutli v0.1.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/mutli` +[NOTE] package `patched v1.0.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/patched` +", + ) + .run(); +} + +#[cargo_test] +fn outdated_lock_version_change_does_not_warn() { + // If the version of the package being packaged changes, but Cargo.lock is + // not updated, don't bother warning about it. + let p = project() + .file("Cargo.toml", &pl_manifest("foo", "0.1.0", "")) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("generate-lockfile").run(); + + p.change_file("Cargo.toml", &pl_manifest("foo", "0.2.0", "")); + + p.cargo("package --no-verify") + .with_stderr("[PACKAGING] foo v0.2.0 ([..])") + .run(); +} + +#[cargo_test] +fn no_warn_workspace_extras() { + // Other entries in workspace lock file should be ignored. + Package::new("dep1", "1.0.0").publish(); + Package::new("dep2", "1.0.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + &pl_manifest( + "a", + "0.1.0", + r#" + [dependencies] + dep1 = "1.0" + "#, + ), + ) + .file("a/src/main.rs", "fn main() {}") + .file( + "b/Cargo.toml", + &pl_manifest( + "b", + "0.1.0", + r#" + [dependencies] + dep2 = "1.0" + "#, + ), + ) + .file("b/src/main.rs", "fn main() {}") + .build(); + p.cargo("generate-lockfile").run(); + p.cargo("package --no-verify") + .cwd("a") + .with_stderr( + "\ +[PACKAGING] a v0.1.0 ([..]) +[UPDATING] `[..]` index +", + ) + .run(); +} + +#[cargo_test] +fn warn_package_with_yanked() { + Package::new("bar", "0.1.0").publish(); + let p = project() + .file( + "Cargo.toml", + &pl_manifest( + "foo", + "0.0.1", + r#" + [dependencies] + bar = "0.1" + "#, + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("generate-lockfile").run(); + Package::new("bar", "0.1.0").yanked(true).publish(); + // Make sure it sticks with the locked (yanked) version. + Package::new("bar", "0.1.1").publish(); + p.cargo("package --no-verify") + .with_stderr( + "\ +[PACKAGING] foo v0.0.1 ([..]) +[UPDATING] `[..]` index +[WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \ + `crates.io`, consider updating to a version that is not yanked +", + ) + .run(); +} + +#[cargo_test] +fn warn_install_with_yanked() { + Package::new("bar", "0.1.0").yanked(true).publish(); + Package::new("bar", "0.1.1").publish(); + Package::new("foo", "0.1.0") + .dep("bar", "0.1") + .file("src/main.rs", "fn main() {}") + .file( + "Cargo.lock", + r#" +[[package]] +name = "bar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "foo" +version = "0.1.0" +dependencies = [ + "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + "#, + ) + .publish(); + + cargo_process("install --locked foo") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] foo v0.1.0 (registry `[..]`) +[INSTALLING] foo v0.1.0 +[WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \ + `crates.io`, consider running without --locked +[DOWNLOADING] crates ... 
+[DOWNLOADED] bar v0.1.0 (registry `[..]`) +[COMPILING] bar v0.1.0 +[COMPILING] foo v0.1.0 +[FINISHED] release [optimized] target(s) in [..] +[INSTALLING] [..]/.cargo/bin/foo[EXE] +[INSTALLED] package `foo v0.1.0` (executable `foo[EXE]`) +[WARNING] be sure to add [..] +", + ) + .run(); + + // Try again without --locked, make sure it uses 0.1.1 and does not warn. + cargo_process("install --force foo") + .with_stderr( + "\ +[UPDATING] `[..]` index +[INSTALLING] foo v0.1.0 +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.1 (registry `[..]`) +[COMPILING] bar v0.1.1 +[COMPILING] foo v0.1.0 +[FINISHED] release [optimized] target(s) in [..] +[REPLACING] [..]/.cargo/bin/foo[EXE] +[REPLACED] package `foo v0.1.0` with `foo v0.1.0` (executable `foo[EXE]`) +[WARNING] be sure to add [..] +", + ) + .run(); +} diff --git a/tests/testsuite/read_manifest.rs b/tests/testsuite/read_manifest.rs new file mode 100644 index 00000000000..61e520f1499 --- /dev/null +++ b/tests/testsuite/read_manifest.rs @@ -0,0 +1,105 @@ +use crate::support::{basic_bin_manifest, main_file, project}; + +static MANIFEST_OUTPUT: &str = r#" +{ + "authors": [ + "wycats@example.com" + ], + "categories": [], + "name":"foo", + "readme": null, + "repository": null, + "version":"0.5.0", + "id":"foo[..]0.5.0[..](path+file://[..]/foo)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "description": null, + "edition": "2015", + "source":null, + "dependencies":[], + "targets":[{ + "kind":["bin"], + "crate_types":["bin"], + "doctest": false, + "edition": "2015", + "name":"foo", + "src_path":"[..]/foo/src/foo.rs" + }], + "features":{}, + "manifest_path":"[..]Cargo.toml", + "metadata": null +}"#; + +#[cargo_test] +fn cargo_read_manifest_path_to_cargo_toml_relative() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("read-manifest --manifest-path foo/Cargo.toml") + .cwd(p.root().parent().unwrap()) + .with_json(MANIFEST_OUTPUT) + .run(); +} + +#[cargo_test] +fn cargo_read_manifest_path_to_cargo_toml_absolute() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("read-manifest --manifest-path") + .arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()) + .with_json(MANIFEST_OUTPUT) + .run(); +} + +#[cargo_test] +fn cargo_read_manifest_path_to_cargo_toml_parent_relative() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("read-manifest --manifest-path foo") + .cwd(p.root().parent().unwrap()) + .with_status(101) + .with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ) + .run(); +} + +#[cargo_test] +fn cargo_read_manifest_path_to_cargo_toml_parent_absolute() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("read-manifest --manifest-path") + .arg(p.root()) + .cwd(p.root().parent().unwrap()) + .with_status(101) + .with_stderr( + "[ERROR] the manifest-path must be \ + a path to a Cargo.toml file", + ) + .run(); +} + +#[cargo_test] +fn cargo_read_manifest_cwd() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("read-manifest").with_json(MANIFEST_OUTPUT).run(); +} diff --git 
a/tests/testsuite/registry.rs b/tests/testsuite/registry.rs new file mode 100644 index 00000000000..9fb92d80f65 --- /dev/null +++ b/tests/testsuite/registry.rs @@ -0,0 +1,2029 @@ +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::Path; + +use crate::support::cargo_process; +use crate::support::git; +use crate::support::paths::{self, CargoPathExt}; +use crate::support::registry::{self, registry_path, registry_url, Dependency, Package}; +use crate::support::{basic_manifest, project}; +use cargo::util::paths::remove_dir_all; + +#[cargo_test] +fn simple() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry_path().to_str().unwrap() + )) + .run(); + + p.cargo("clean").run(); + + // Don't download a second time + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + + p.cargo("build") + .with_stderr(&format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry_path().to_str().unwrap() + )) + .run(); +} + +#[cargo_test] +fn nonexistent() { + Package::new("init", "0.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + nonexistent = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +error: no matching package named `nonexistent` found +location searched: registry [..] +required by package `foo v0.0.1 ([..])` +", + ) + .run(); +} + +#[cargo_test] +fn wrong_case() { + Package::new("init", "0.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + Init = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // #5678 to make this work + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +error: no matching package named `Init` found +location searched: registry [..] 
+perhaps you meant: init +required by package `foo v0.0.1 ([..])` +", + ) + .run(); +} + +#[cargo_test] +fn mis_hyphenated() { + Package::new("mis-hyphenated", "0.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + mis_hyphenated = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + // #2775 to make this work + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +error: no matching package named `mis_hyphenated` found +location searched: registry [..] +perhaps you meant: mis-hyphenated +required by package `foo v0.0.1 ([..])` +", + ) + .run(); +} + +#[cargo_test] +fn wrong_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + foo = ">= 1.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.0.1").publish(); + Package::new("foo", "0.0.2").publish(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to select a version for the requirement `foo = \">= 1.0.0\"` + candidate versions found which didn't match: 0.0.2, 0.0.1 + location searched: `[..]` index (which is replacing registry `[..]`) +required by package `foo v0.0.1 ([..])` +", + ) + .run(); + + Package::new("foo", "0.0.3").publish(); + Package::new("foo", "0.0.4").publish(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to select a version for the requirement `foo = \">= 1.0.0\"` + candidate versions found which didn't match: 0.0.4, 0.0.3, 0.0.2, ... + location searched: `[..]` index (which is replacing registry `[..]`) +required by package `foo v0.0.1 ([..])` +", + ) + .run(); +} + +#[cargo_test] +fn bad_cksum() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bad-cksum = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + let pkg = Package::new("bad-cksum", "0.0.1"); + pkg.publish(); + t!(File::create(&pkg.archive_dst())); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] index +[DOWNLOADING] crates ... +[DOWNLOADED] bad-cksum [..] +[ERROR] failed to download replaced source registry `https://[..]` + +Caused by: + failed to verify the checksum of `bad-cksum v0.0.1 (registry `[ROOT][..]`)` +", + ) + .run(); +} + +#[cargo_test] +fn update_registry() { + Package::new("init", "0.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + notyet = ">= 0.0.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +error: no matching package named `notyet` found +location searched: registry `[..]` +required by package `foo v0.0.1 ([..])` +", + ) + .run(); + + Package::new("notyet", "0.0.1").publish(); + + p.cargo("build") + .with_stderr(format!( + "\ +[UPDATING] `{reg}` index +[DOWNLOADING] crates ... 
+[DOWNLOADED] notyet v0.0.1 (registry `[ROOT][..]`) +[COMPILING] notyet v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + reg = registry_path().to_str().unwrap() + )) + .run(); +} + +#[cargo_test] +fn package_with_path_deps() { + Package::new("init", "0.0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license = "MIT" + description = "foo" + repository = "bar" + + [dependencies.notyet] + version = "0.0.1" + path = "notyet" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("notyet/Cargo.toml", &basic_manifest("notyet", "0.0.1")) + .file("notyet/src/lib.rs", "") + .build(); + + p.cargo("package -v") + .with_status(101) + .with_stderr_contains( + "\ +[ERROR] failed to prepare local package for uploading + +Caused by: + no matching package named `notyet` found +location searched: registry [..] +required by package `foo v0.0.1 ([..])` +", + ) + .run(); + + Package::new("notyet", "0.0.1").publish(); + + p.cargo("package") + .with_stderr( + "\ +[PACKAGING] foo v0.0.1 ([CWD]) +[UPDATING] `[..]` index +[VERIFYING] foo v0.0.1 ([CWD]) +[DOWNLOADING] crates ... +[DOWNLOADED] notyet v0.0.1 (registry `[ROOT][..]`) +[COMPILING] notyet v0.0.1 +[COMPILING] foo v0.0.1 ([CWD][..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn lockfile_locks() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); + + p.root().move_into_the_past(); + Package::new("bar", "0.0.2").publish(); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn lockfile_locks_transitively() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[DOWNLOADED] [..] 
v0.0.1 (registry `[ROOT][..]`) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); + + p.root().move_into_the_past(); + Package::new("baz", "0.0.2").publish(); + Package::new("bar", "0.0.2").dep("baz", "*").publish(); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn yanks_are_not_used() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("baz", "0.0.2").yanked(true).publish(); + Package::new("bar", "0.0.1").dep("baz", "*").publish(); + Package::new("bar", "0.0.2") + .dep("baz", "*") + .yanked(true) + .publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn relying_on_a_yank_is_bad() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("baz", "0.0.2").yanked(true).publish(); + Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains( + "\ +error: failed to select a version for the requirement `baz = \"= 0.0.2\"` + candidate versions found which didn't match: 0.0.1 + location searched: `[..]` index (which is replacing registry `[..]`) +required by package `bar v0.0.1` + ... which is depended on by `foo [..]` +", + ) + .run(); +} + +#[cargo_test] +fn yanks_in_lockfiles_are_ok() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + p.cargo("build").run(); + + registry_path().join("3").rm_rf(); + + Package::new("bar", "0.0.1").yanked(true).publish(); + + p.cargo("build").with_stdout("").run(); + + p.cargo("update") + .with_status(101) + .with_stderr_contains( + "\ +error: no matching package named `bar` found +location searched: registry [..] +required by package `foo v0.0.1 ([..])` +", + ) + .run(); +} + +#[cargo_test] +fn yanks_in_lockfiles_are_ok_for_other_update() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + baz = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + Package::new("baz", "0.0.1").publish(); + + p.cargo("build").run(); + + registry_path().join("3").rm_rf(); + + Package::new("bar", "0.0.1").yanked(true).publish(); + Package::new("baz", "0.0.1").publish(); + + p.cargo("build").with_stdout("").run(); + + Package::new("baz", "0.0.2").publish(); + + p.cargo("update") + .with_status(101) + .with_stderr_contains( + "\ +error: no matching package named `bar` found +location searched: registry [..] 
+required by package `foo v0.0.1 ([..])` +", + ) + .run(); + + p.cargo("update -p baz") + .with_stderr_contains( + "\ +[UPDATING] `[..]` index +[UPDATING] baz v0.0.1 -> v0.0.2 +", + ) + .run(); +} + +#[cargo_test] +fn yanks_in_lockfiles_are_ok_with_new_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + + p.cargo("build").run(); + + registry_path().join("3").rm_rf(); + + Package::new("bar", "0.0.1").yanked(true).publish(); + Package::new("baz", "0.0.1").publish(); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + baz = "*" + "# + )); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn update_with_lockfile_if_packages_missing() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.0.1").publish(); + p.cargo("build").run(); + p.root().move_into_the_past(); + + paths::home().join(".cargo/registry").rm_rf(); + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn update_lockfile() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + println!("0.0.1"); + Package::new("bar", "0.0.1").publish(); + p.cargo("build").run(); + + Package::new("bar", "0.0.2").publish(); + Package::new("bar", "0.0.3").publish(); + paths::home().join(".cargo/registry").rm_rf(); + println!("0.0.2 update"); + p.cargo("update -p bar --precise 0.0.2") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] bar v0.0.1 -> v0.0.2 +", + ) + .run(); + + println!("0.0.2 build"); + p.cargo("build") + .with_stderr( + "\ +[DOWNLOADING] crates ... +[DOWNLOADED] [..] v0.0.2 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.2 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); + + println!("0.0.3 update"); + p.cargo("update -p bar") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] bar v0.0.2 -> v0.0.3 +", + ) + .run(); + + println!("0.0.3 build"); + p.cargo("build") + .with_stderr( + "\ +[DOWNLOADING] crates ... +[DOWNLOADED] [..] 
v0.0.3 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.3 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); + + println!("new dependencies update"); + Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish(); + Package::new("spam", "0.2.5").publish(); + p.cargo("update -p bar") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] bar v0.0.3 -> v0.0.4 +[ADDING] spam v0.2.5 +", + ) + .run(); + + println!("new dependencies update"); + Package::new("bar", "0.0.5").publish(); + p.cargo("update -p bar") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] bar v0.0.4 -> v0.0.5 +[REMOVING] spam v0.2.5 +", + ) + .run(); +} + +#[cargo_test] +fn dev_dependency_not_used() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("baz", "0.0.1").publish(); + Package::new("bar", "0.0.1").dev_dep("baz", "*").publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] [..] v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn login_with_no_cargo_dir() { + // Create a config in the root directory because `login` requires the + // index to be updated, and we don't want to hit crates.io. + registry::init(); + fs::rename(paths::home().join(".cargo"), paths::root().join(".cargo")).unwrap(); + paths::home().rm_rf(); + cargo_process("login foo -v").run(); + let credentials = fs::read_to_string(paths::home().join(".cargo/credentials")).unwrap(); + assert_eq!(credentials, "[registry]\ntoken = \"foo\"\n"); +} + +#[cargo_test] +fn login_with_differently_sized_token() { + // Verify that the configuration file gets properly truncated. + registry::init(); + let credentials = paths::home().join(".cargo/credentials"); + fs::remove_file(&credentials).unwrap(); + cargo_process("login lmaolmaolmao -v").run(); + cargo_process("login lmao -v").run(); + cargo_process("login lmaolmaolmao -v").run(); + let credentials = fs::read_to_string(&credentials).unwrap(); + assert_eq!(credentials, "[registry]\ntoken = \"lmaolmaolmao\"\n"); +} + +#[cargo_test] +fn bad_license_file() { + Package::new("foo", "1.0.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + license-file = "foo" + description = "bar" + repository = "baz" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + p.cargo("publish -v --index") + .arg(registry_url().to_string()) + .with_status(101) + .with_stderr_contains("[ERROR] the license file `foo` does not exist") + .run(); +} + +#[cargo_test] +fn updating_a_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.a] + path = "a" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("a/src/lib.rs", "") + .build(); + + Package::new("bar", "0.0.1").publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... 
+[DOWNLOADED] bar v0.0.1 (registry `[ROOT][..]`) +[COMPILING] bar v0.0.1 +[COMPILING] a v0.0.1 ([CWD]/a) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); + + t!(t!(File::create(&p.root().join("a/Cargo.toml"))).write_all( + br#" + [project] + name = "a" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "0.1.0" + "# + )); + Package::new("bar", "0.1.0").publish(); + + println!("second"); + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.1.0 (registry `[ROOT][..]`) +[COMPILING] bar v0.1.0 +[COMPILING] a v0.0.1 ([CWD]/a) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn git_and_registry_dep() { + let b = git::repo(&paths::root().join("b")) + .file( + "Cargo.toml", + r#" + [project] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies] + a = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = "0.0.1" + + [dependencies.b] + git = '{}' + "#, + b.url() + ), + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.0.1").publish(); + + p.root().move_into_the_past(); + p.cargo("build") + .with_stderr( + "\ +[UPDATING] [..] +[UPDATING] [..] +[DOWNLOADING] crates ... +[DOWNLOADED] a v0.0.1 (registry `[ROOT][..]`) +[COMPILING] a v0.0.1 +[COMPILING] b v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); + p.root().move_into_the_past(); + + println!("second"); + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn update_publish_then_update() { + // First generate a Cargo.lock and a clone of the registry index at the + // "head" of the current registry. + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + Package::new("a", "0.1.0").publish(); + p.cargo("build").run(); + + // Next, publish a new package and back up the copy of the registry we just + // created. + Package::new("a", "0.1.1").publish(); + let registry = paths::home().join(".cargo/registry"); + let backup = paths::root().join("registry-backup"); + t!(fs::rename(®istry, &backup)); + + // Generate a Cargo.lock with the newer version, and then move the old copy + // of the registry back into place. + let p2 = project() + .at("foo2") + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + p2.cargo("build").run(); + registry.rm_rf(); + t!(fs::rename(&backup, ®istry)); + t!(fs::rename( + p2.root().join("Cargo.lock"), + p.root().join("Cargo.lock") + )); + + // Finally, build the first project again (with our newer Cargo.lock) which + // should force an update of the old registry, download the new crate, and + // then build everything again. + p.cargo("build") + .with_stderr( + "\ +[UPDATING] [..] +[DOWNLOADING] crates ... 
+[DOWNLOADED] a v0.1.1 (registry `[ROOT][..]`) +[COMPILING] a v0.1.1 +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn fetch_downloads() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + + p.cargo("fetch") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] a v0.1.0 (registry [..]) +", + ) + .run(); +} + +#[cargo_test] +fn update_transitive_dependency() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "0.1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").dep("b", "*").publish(); + Package::new("b", "0.1.0").publish(); + + p.cargo("fetch").run(); + + Package::new("b", "0.1.1").publish(); + + p.cargo("update -pb") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] b v0.1.0 -> v0.1.1 +", + ) + .run(); + + p.cargo("build") + .with_stderr( + "\ +[DOWNLOADING] crates ... +[DOWNLOADED] b v0.1.1 (registry `[ROOT][..]`) +[COMPILING] b v0.1.1 +[COMPILING] a v0.1.0 +[COMPILING] foo v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn update_backtracking_ok() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + webdriver = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("webdriver", "0.1.0") + .dep("hyper", "0.6") + .publish(); + Package::new("hyper", "0.6.5") + .dep("openssl", "0.1") + .dep("cookie", "0.1") + .publish(); + Package::new("cookie", "0.1.0") + .dep("openssl", "0.1") + .publish(); + Package::new("openssl", "0.1.0").publish(); + + p.cargo("generate-lockfile").run(); + + Package::new("openssl", "0.1.1").publish(); + Package::new("hyper", "0.6.6") + .dep("openssl", "0.1.1") + .dep("cookie", "0.1.0") + .publish(); + + p.cargo("update -p hyper") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] hyper v0.6.5 -> v0.6.6 +[UPDATING] openssl v0.1.0 -> v0.1.1 +", + ) + .run(); +} + +#[cargo_test] +fn update_multiple_packages() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + a = "*" + b = "*" + c = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + Package::new("b", "0.1.0").publish(); + Package::new("c", "0.1.0").publish(); + + p.cargo("fetch").run(); + + Package::new("a", "0.1.1").publish(); + Package::new("b", "0.1.1").publish(); + Package::new("c", "0.1.1").publish(); + + p.cargo("update -pa -pb") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] a v0.1.0 -> v0.1.1 +[UPDATING] b v0.1.0 -> v0.1.1 +", + ) + .run(); + + p.cargo("update -pb -pc") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] c v0.1.0 -> v0.1.1 +", + ) + .run(); + + p.cargo("build") + .with_stderr_contains("[DOWNLOADED] a v0.1.1 (registry `[ROOT][..]`)") + .with_stderr_contains("[DOWNLOADED] b v0.1.1 (registry `[ROOT][..]`)") + .with_stderr_contains("[DOWNLOADED] c v0.1.1 (registry `[ROOT][..]`)") + .with_stderr_contains("[COMPILING] a v0.1.1") + .with_stderr_contains("[COMPILING] b v0.1.1") + .with_stderr_contains("[COMPILING] 
c v0.1.1") + .with_stderr_contains("[COMPILING] foo v0.5.0 ([..])") + .run(); +} + +#[cargo_test] +fn bundled_crate_in_registry() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "0.1" + baz = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0") + .dep("bar", "0.1.0") + .file( + "Cargo.toml", + r#" + [package] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar", version = "0.1.0" } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "") + .publish(); + + p.cargo("run").run(); +} + +#[cargo_test] +fn update_same_prefix_oh_my_how_was_this_a_bug() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "ugh" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.1" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foobar", "0.2.0").publish(); + Package::new("foo", "0.1.0") + .dep("foobar", "0.2.0") + .publish(); + + p.cargo("generate-lockfile").run(); + p.cargo("update -pfoobar --precise=0.2.0").run(); +} + +#[cargo_test] +fn use_semver() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "1.2.3-alpha.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "1.2.3-alpha.0").publish(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn only_download_relevant() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [target.foo.dependencies] + foo = "*" + [dev-dependencies] + bar = "*" + [dependencies] + baz = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0").publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... 
+[DOWNLOADED] baz v0.1.0 ([..]) +[COMPILING] baz v0.1.0 +[COMPILING] bar v0.5.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s +", + ) + .run(); +} + +#[cargo_test] +fn resolve_and_backtracking() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.1") + .feature_dep("bar", "0.1", &["a", "b"]) + .publish(); + Package::new("foo", "0.1.0").publish(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn upstream_warnings_on_extra_verbose() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("foo", "0.1.0") + .file("src/lib.rs", "fn unused() {}") + .publish(); + + p.cargo("build -vv") + .with_stderr_contains("[..]warning: function is never used[..]") + .run(); +} + +#[cargo_test] +fn disallow_network() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --frozen") + .with_status(101) + .with_stderr( + "\ +error: failed to load source for a dependency on `foo` + +Caused by: + Unable to update registry [..] + +Caused by: + attempting to make an HTTP request, but --frozen was specified +", + ) + .run(); +} + +#[cargo_test] +fn add_dep_dont_update_registry() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + Package::new("remote", "0.3.4").publish(); + + p.cargo("build").run(); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + remote = "0.3" + "# + )); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.5.0 ([..]) +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn bump_version_dont_update_registry() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#, + ) + .file("baz/src/lib.rs", "") + .build(); + + Package::new("remote", "0.3.4").publish(); + + p.cargo("build").run(); + + t!(t!(File::create(p.root().join("Cargo.toml"))).write_all( + br#" + [project] + name = "bar" + version = "0.6.0" + authors = [] + + [dependencies] + baz = { path = "baz" } + "# + )); + + p.cargo("build") + .with_stderr( + "\ +[COMPILING] bar v0.6.0 ([..]) +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn old_version_req() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.2*" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("remote", "0.2.0").publish(); + + p.cargo("build") + .with_stderr( + "\ +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of bar 0.5.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of bar 0.5.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +[UPDATING] [..] +[DOWNLOADING] crates ... +[DOWNLOADED] [..] +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn old_version_req_upstream() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + remote = "0.3" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + Package::new("remote", "0.3.0") + .file( + "Cargo.toml", + r#" + [project] + name = "remote" + version = "0.3.0" + authors = [] + + [dependencies] + bar = "0.2*" + "#, + ) + .file("src/lib.rs", "") + .publish(); + Package::new("bar", "0.2.0").publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] [..] +[DOWNLOADING] crates ... +[DOWNLOADED] [..] +warning: parsed version requirement `0.2*` is no longer valid + +Previous versions of Cargo accepted this malformed requirement, +but it is being deprecated. This was found when parsing the manifest +of remote 0.3.0, and the correct version requirement is `0.2.*`. + +This will soon become a hard error, so it's either recommended to +update to a fixed version or contact the upstream maintainer about +this warning. + +[COMPILING] [..] +[COMPILING] [..] +[FINISHED] [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn toml_lies_but_index_is_truth() { + Package::new("foo", "0.2.0").publish(); + Package::new("bar", "0.3.0") + .dep("foo", "0.2.0") + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.3.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#, + ) + .file("src/lib.rs", "extern crate foo;") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "0.3" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn vv_prints_warnings() { + Package::new("foo", "0.2.0") + .file( + "src/lib.rs", + "#![deny(warnings)] fn foo() {} // unused function", + ) + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.2" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -vv").run(); +} + +#[cargo_test] +fn bad_and_or_malicious_packages_rejected() { + Package::new("foo", "0.2.0") + .extra_file("foo-0.1.0/src/lib.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.2" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -vv") + .with_status(101) + .with_stderr( + "\ +[UPDATING] [..] +[DOWNLOADING] crates ... +[DOWNLOADED] [..] +error: failed to download [..] + +Caused by: + failed to unpack [..] + +Caused by: + [..] contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\" +", + ) + .run(); +} + +#[cargo_test] +fn git_init_templatedir_missing() { + Package::new("foo", "0.2.0").dep("bar", "*").publish(); + Package::new("bar", "0.2.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "fo" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.2" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); + + remove_dir_all(paths::home().join(".cargo/registry")).unwrap(); + File::create(paths::home().join(".gitconfig")) + .unwrap() + .write_all( + br#" + [init] + templatedir = nowhere + "#, + ) + .unwrap(); + + p.cargo("build").run(); + p.cargo("build").run(); +} + +#[cargo_test] +fn rename_deps_and_features() { + Package::new("foo", "0.1.0") + .file("src/lib.rs", "pub fn f1() {}") + .publish(); + Package::new("foo", "0.2.0") + .file("src/lib.rs", "pub fn f2() {}") + .publish(); + Package::new("bar", "0.2.0") + .add_dep( + Dependency::new("foo01", "0.1.0") + .package("foo") + .optional(true), + ) + .add_dep(Dependency::new("foo02", "0.2.0").package("foo")) + .feature("another", &["foo01"]) + .file( + "src/lib.rs", + r#" + extern crate foo02; + #[cfg(feature = "foo01")] + extern crate foo01; + + pub fn foo() { + foo02::f2(); + #[cfg(feature = "foo01")] + foo01::f1(); + } + "#, + ) + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + + [dependencies] + bar = "0.2" + "#, + ) + .file( + "src/main.rs", + " + extern crate bar; + fn main() { bar::foo(); } + ", + ) + .build(); + + p.cargo("build").run(); + p.cargo("build --features bar/foo01").run(); + p.cargo("build --features bar/another").run(); +} + +#[cargo_test] +fn ignore_invalid_json_lines() { + Package::new("foo", "0.1.0").publish(); + Package::new("foo", "0.1.1").invalid_json(true).publish(); + Package::new("foo", 
"0.2.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + + [dependencies] + foo = '0.1.0' + foo02 = { version = '0.2.0', package = 'foo' } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn readonly_registry_still_works() { + Package::new("foo", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.5.0" + authors = [] + + [dependencies] + foo = '0.1.0' + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("generate-lockfile").run(); + p.cargo("fetch --locked").run(); + chmod_readonly(&paths::home(), true); + p.cargo("build").run(); + // make sure we un-readonly the files afterwards so "cargo clean" can remove them (#6934) + chmod_readonly(&paths::home(), false); + + fn chmod_readonly(path: &Path, readonly: bool) { + for entry in t!(path.read_dir()) { + let entry = t!(entry); + let path = entry.path(); + if t!(entry.file_type()).is_dir() { + chmod_readonly(&path, readonly); + } else { + set_readonly(&path, readonly); + } + } + set_readonly(path, readonly); + } + + fn set_readonly(path: &Path, readonly: bool) { + let mut perms = t!(path.metadata()).permissions(); + perms.set_readonly(readonly); + t!(fs::set_permissions(path, perms)); + } +} diff --git a/tests/testsuite/rename_deps.rs b/tests/testsuite/rename_deps.rs new file mode 100644 index 00000000000..fc4858163f2 --- /dev/null +++ b/tests/testsuite/rename_deps.rs @@ -0,0 +1,388 @@ +use crate::support::git; +use crate::support::paths; +use crate::support::registry::Package; +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn rename_dependency() { + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.2.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { version = "0.1.0" } + baz = { version = "0.2.0", package = "bar" } + "#, + ) + .file("src/lib.rs", "extern crate bar; extern crate baz;") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn rename_with_different_names() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + baz = { path = "bar", package = "bar" } + "#, + ) + .file("src/lib.rs", "extern crate baz;") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "random_name" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn lots_of_names() { + Package::new("foo", "0.1.0") + .file("src/lib.rs", "pub fn foo1() {}") + .publish(); + Package::new("foo", "0.2.0") + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("foo", "0.1.0") + .file("src/lib.rs", "pub fn foo2() {}") + .alternative(true) + .publish(); + + let g = git::repo(&paths::root().join("another")) + .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("src/lib.rs", "pub fn foo3() {}") + .build(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.2" + foo1 = {{ version = "0.1", package = "foo" }} + foo2 = {{ version = "0.1", registry = "alternative", package = "foo" }} + foo3 = {{ git = '{}', package = "foo" }} + foo4 = {{ path = "foo", package = "foo" }} + "#, + g.url() + ), + ) + .file( + "src/lib.rs", + " + extern 
crate foo; + extern crate foo1; + extern crate foo2; + extern crate foo3; + extern crate foo4; + + pub fn foo() { + foo::foo(); + foo1::foo1(); + foo2::foo2(); + foo3::foo3(); + foo4::foo4(); + } + ", + ) + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", "pub fn foo4() {}") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn rename_and_patch() { + Package::new("foo", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", package = "foo" } + + [patch.crates-io] + foo = { path = "foo" } + "#, + ) + .file( + "src/lib.rs", + "extern crate bar; pub fn foo() { bar::foo(); }", + ) + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", "pub fn foo() {}") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn rename_twice() { + Package::new("foo", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", package = "foo" } + [build-dependencies] + foo = { version = "0.1" } + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] foo v0.1.0 (registry [..]) +error: the crate `test v0.1.0 ([CWD])` depends on crate `foo v0.1.0` multiple times with different names +", + ) + .run(); +} + +#[cargo_test] +fn rename_affects_fingerprint() { + Package::new("foo", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { version = "0.1", package = "foo" } + "#, + ) + .file("src/lib.rs", "extern crate foo;") + .build(); + + p.cargo("build -v").run(); + + p.change_file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { version = "0.1", package = "foo" } + "#, + ); + + p.cargo("build -v") + .with_status(101) + .with_stderr_contains("[..]can't find crate for `foo`") + .run(); +} + +#[cargo_test] +fn can_run_doc_tests() { + Package::new("bar", "0.1.0").publish(); + Package::new("bar", "0.2.0").publish(); + + let foo = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { version = "0.1.0" } + baz = { version = "0.2.0", package = "bar" } + "#, + ) + .file( + "src/lib.rs", + " + extern crate bar; + extern crate baz; + ", + ) + .build(); + + foo.cargo("test -v") + .with_stderr_contains( + "\ +[DOCTEST] foo +[RUNNING] `rustdoc --test [CWD]/src/lib.rs \ + [..] 
\ + --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib \ + --extern baz=[CWD]/target/debug/deps/libbar-[..].rlib \ + [..]` +", + ) + .run(); +} + +#[cargo_test] +fn features_still_work() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + p1 = { path = 'a', features = ['b'] } + p2 = { path = 'b' } + "#, + ) + .file("src/lib.rs", "") + .file( + "a/Cargo.toml", + r#" + [package] + name = "p1" + version = "0.1.0" + authors = [] + + [dependencies] + b = { version = "0.1", package = "foo", optional = true } + "#, + ) + .file("a/src/lib.rs", "extern crate b;") + .file( + "b/Cargo.toml", + r#" + [package] + name = "p2" + version = "0.1.0" + authors = [] + + [dependencies] + b = { version = "0.1", package = "bar", optional = true } + + [features] + default = ['b'] + "#, + ) + .file("b/src/lib.rs", "extern crate b;") + .build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn features_not_working() { + Package::new("foo", "0.1.0").publish(); + Package::new("bar", "0.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "test" + version = "0.1.0" + authors = [] + + [dependencies] + a = { path = 'a', package = 'p1', optional = true } + + [features] + default = ['p1'] + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("p1", "0.1.0")) + .build(); + + p.cargo("build -v") + .with_status(101) + .with_stderr( + "\ +error: failed to parse manifest at `[..]` + +Caused by: + Feature `default` includes `p1` which is neither a dependency nor another feature +", + ) + .run(); +} + +#[cargo_test] +fn rename_with_dash() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "qwerty" + version = "0.1.0" + + [dependencies] + foo-bar = { path = 'a', package = 'a' } + "#, + ) + .file("src/lib.rs", "extern crate foo_bar;") + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("build").run(); +} diff --git a/tests/testsuite/required_features.rs b/tests/testsuite/required_features.rs new file mode 100644 index 00000000000..1e82b0fda71 --- /dev/null +++ b/tests/testsuite/required_features.rs @@ -0,0 +1,1161 @@ +use crate::support::install::{assert_has_installed_exe, assert_has_not_installed_exe, cargo_home}; +use crate::support::is_nightly; +use crate::support::project; + +#[cargo_test] +fn build_bin_default_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file( + "src/main.rs", + r#" + extern crate foo; + + #[cfg(feature = "a")] + fn test() { + foo::foo(); + } + + fn main() {} + "#, + ) + .file("src/lib.rs", r#"#[cfg(feature = "a")] pub fn foo() {}"#) + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.cargo("build --no-default-features").run(); + + p.cargo("build --bin=foo").run(); + assert!(p.bin("foo").is_file()); + + p.cargo("build --bin=foo --no-default-features") + .with_status(101) + .with_stderr( + "\ +error: target `foo` in package `foo` requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); +} + +#[cargo_test] +fn build_bin_arg_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + 
authors = [] + + [features] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build --features a").run(); + assert!(p.bin("foo").is_file()); +} + +#[cargo_test] +fn build_bin_multiple_required_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bin]] + name = "foo_1" + path = "src/foo_1.rs" + required-features = ["b", "c"] + + [[bin]] + name = "foo_2" + path = "src/foo_2.rs" + required-features = ["a"] + "#, + ) + .file("src/foo_1.rs", "fn main() {}") + .file("src/foo_2.rs", "fn main() {}") + .build(); + + p.cargo("build").run(); + + assert!(!p.bin("foo_1").is_file()); + assert!(p.bin("foo_2").is_file()); + + p.cargo("build --features c").run(); + + assert!(p.bin("foo_1").is_file()); + assert!(p.bin("foo_2").is_file()); + + p.cargo("build --no-default-features").run(); +} + +#[cargo_test] +fn build_example_default_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[example]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("examples/foo.rs", "fn main() {}") + .build(); + + p.cargo("build --example=foo").run(); + assert!(p.bin("examples/foo").is_file()); + + p.cargo("build --example=foo --no-default-features") + .with_status(101) + .with_stderr( + "\ +error: target `foo` in package `foo` requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); +} + +#[cargo_test] +fn build_example_arg_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[example]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("examples/foo.rs", "fn main() {}") + .build(); + + p.cargo("build --example=foo --features a").run(); + assert!(p.bin("examples/foo").is_file()); +} + +#[cargo_test] +fn build_example_multiple_required_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[example]] + name = "foo_1" + required-features = ["b", "c"] + + [[example]] + name = "foo_2" + required-features = ["a"] + "#, + ) + .file("examples/foo_1.rs", "fn main() {}") + .file("examples/foo_2.rs", "fn main() {}") + .build(); + + p.cargo("build --example=foo_1") + .with_status(101) + .with_stderr( + "\ +error: target `foo_1` in package `foo` requires the features: `b`, `c` +Consider enabling them by passing, e.g., `--features=\"b c\"` +", + ) + .run(); + p.cargo("build --example=foo_2").run(); + + assert!(!p.bin("examples/foo_1").is_file()); + assert!(p.bin("examples/foo_2").is_file()); + + p.cargo("build --example=foo_1 --features c").run(); + p.cargo("build --example=foo_2 --features c").run(); + + assert!(p.bin("examples/foo_1").is_file()); + assert!(p.bin("examples/foo_2").is_file()); + + p.cargo("build --example=foo_1 --no-default-features") + .with_status(101) + .with_stderr( + "\ +error: target `foo_1` in package `foo` requires the features: `b`, `c` +Consider enabling them by passing, e.g., `--features=\"b c\"` +", + ) + .run(); + p.cargo("build --example=foo_2 --no-default-features") + .with_status(101) + .with_stderr( + "\ +error: target `foo_2` in package `foo` 
requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); +} + +#[cargo_test] +fn test_default_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[test]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("tests/foo.rs", "#[test]\nfn test() {}") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test test ... ok") + .run(); + + p.cargo("test --no-default-features") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .with_stdout("") + .run(); + + p.cargo("test --test=foo") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test test ... ok") + .run(); + + p.cargo("test --test=foo --no-default-features") + .with_status(101) + .with_stderr( + "\ +error: target `foo` in package `foo` requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); +} + +#[cargo_test] +fn test_arg_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[test]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("tests/foo.rs", "#[test]\nfn test() {}") + .build(); + + p.cargo("test --features a") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test test ... ok") + .run(); +} + +#[cargo_test] +fn test_multiple_required_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[test]] + name = "foo_1" + required-features = ["b", "c"] + + [[test]] + name = "foo_2" + required-features = ["a"] + "#, + ) + .file("tests/foo_1.rs", "#[test]\nfn test() {}") + .file("tests/foo_2.rs", "#[test]\nfn test() {}") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo_2-[..][EXE]", + ) + .with_stdout_contains("test test ... ok") + .run(); + + p.cargo("test --features c") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo_1-[..][EXE] +[RUNNING] target/debug/deps/foo_2-[..][EXE]", + ) + .with_stdout_contains_n("test test ... 
ok", 2) + .run(); + + p.cargo("test --no-default-features") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .with_stdout("") + .run(); +} + +#[cargo_test] +fn bench_default_features() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bench]] + name = "foo" + required-features = ["a"] + "#, + ) + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test bench ... bench: [..]") + .run(); + + p.cargo("bench --no-default-features") + .with_stderr("[FINISHED] release [optimized] target(s) in [..]".to_string()) + .with_stdout("") + .run(); + + p.cargo("bench --bench=foo") + .with_stderr( + "\ +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test bench ... bench: [..]") + .run(); + + p.cargo("bench --bench=foo --no-default-features") + .with_status(101) + .with_stderr( + "\ +error: target `foo` in package `foo` requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); +} + +#[cargo_test] +fn bench_arg_features() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bench]] + name = "foo" + required-features = ["a"] + "#, + ) + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .build(); + + p.cargo("bench --features a") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test bench ... bench: [..]") + .run(); +} + +#[cargo_test] +fn bench_multiple_required_features() { + if !is_nightly() { + return; + } + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bench]] + name = "foo_1" + required-features = ["b", "c"] + + [[bench]] + name = "foo_2" + required-features = ["a"] + "#, + ) + .file( + "benches/foo_1.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .file( + "benches/foo_2.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .build(); + + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo_2-[..][EXE]", + ) + .with_stdout_contains("test bench ... bench: [..]") + .run(); + + p.cargo("bench --features c") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo_1-[..][EXE] +[RUNNING] target/release/deps/foo_2-[..][EXE]", + ) + .with_stdout_contains_n("test bench ... 
bench: [..]", 2) + .run(); + + p.cargo("bench --no-default-features") + .with_stderr("[FINISHED] release [optimized] target(s) in [..]") + .with_stdout("") + .run(); +} + +#[cargo_test] +fn install_default_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + + [[example]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .build(); + + p.cargo("install --path .").run(); + assert_has_installed_exe(cargo_home(), "foo"); + p.cargo("uninstall foo").run(); + + p.cargo("install --path . --no-default-features") + .with_status(101) + .with_stderr( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[ERROR] no binaries are available for install using the selected features +", + ) + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); + + p.cargo("install --path . --bin=foo").run(); + assert_has_installed_exe(cargo_home(), "foo"); + p.cargo("uninstall foo").run(); + + p.cargo("install --path . --bin=foo --no-default-features") + .with_status(101) + .with_stderr( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ + `[..]target` + +Caused by: + target `foo` in package `foo` requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); + + p.cargo("install --path . --example=foo").run(); + assert_has_installed_exe(cargo_home(), "foo"); + p.cargo("uninstall foo").run(); + + p.cargo("install --path . --example=foo --no-default-features") + .with_status(101) + .with_stderr( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ + `[..]target` + +Caused by: + target `foo` in package `foo` requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); +} + +#[cargo_test] +fn install_arg_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("install --features a").run(); + assert_has_installed_exe(cargo_home(), "foo"); + p.cargo("uninstall foo").run(); +} + +#[cargo_test] +fn install_multiple_required_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a", "b"] + a = [] + b = ["a"] + c = [] + + [[bin]] + name = "foo_1" + path = "src/foo_1.rs" + required-features = ["b", "c"] + + [[bin]] + name = "foo_2" + path = "src/foo_2.rs" + required-features = ["a"] + "#, + ) + .file("src/foo_1.rs", "fn main() {}") + .file("src/foo_2.rs", "fn main() {}") + .build(); + + p.cargo("install --path .").run(); + assert_has_not_installed_exe(cargo_home(), "foo_1"); + assert_has_installed_exe(cargo_home(), "foo_2"); + p.cargo("uninstall foo").run(); + + p.cargo("install --path . --features c").run(); + assert_has_installed_exe(cargo_home(), "foo_1"); + assert_has_installed_exe(cargo_home(), "foo_2"); + p.cargo("uninstall foo").run(); + + p.cargo("install --path . 
--no-default-features") + .with_status(101) + .with_stderr( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] +[ERROR] no binaries are available for install using the selected features +", + ) + .run(); + assert_has_not_installed_exe(cargo_home(), "foo_1"); + assert_has_not_installed_exe(cargo_home(), "foo_2"); +} + +#[cargo_test] +fn dep_feature_in_toml() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar", features = ["a"] } + + [[bin]] + name = "foo" + required-features = ["bar/a"] + + [[example]] + name = "foo" + required-features = ["bar/a"] + + [[test]] + name = "foo" + required-features = ["bar/a"] + + [[bench]] + name = "foo" + required-features = ["bar/a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .file("tests/foo.rs", "#[test]\nfn test() {}") + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + a = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + // bin + p.cargo("build --bin=foo").run(); + assert!(p.bin("foo").is_file()); + + // example + p.cargo("build --example=foo").run(); + assert!(p.bin("examples/foo").is_file()); + + // test + p.cargo("test --test=foo") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test test ... ok") + .run(); + + // bench + if is_nightly() { + p.cargo("bench --bench=foo") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test bench ... 
bench: [..]") + .run(); + } + + // install + p.cargo("install").run(); + assert_has_installed_exe(cargo_home(), "foo"); + p.cargo("uninstall foo").run(); +} + +#[cargo_test] +fn dep_feature_in_cmd_line() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [[bin]] + name = "foo" + required-features = ["bar/a"] + + [[example]] + name = "foo" + required-features = ["bar/a"] + + [[test]] + name = "foo" + required-features = ["bar/a"] + + [[bench]] + name = "foo" + required-features = ["bar/a"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("examples/foo.rs", "fn main() {}") + .file("tests/foo.rs", "#[test]\nfn test() {}") + .file( + "benches/foo.rs", + r#" + #![feature(test)] + extern crate test; + + #[bench] + fn bench(_: &mut test::Bencher) { + }"#, + ) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [features] + a = [] + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + // bin + p.cargo("build --bin=foo") + .with_status(101) + .with_stderr( + "\ +error: target `foo` in package `foo` requires the features: `bar/a` +Consider enabling them by passing, e.g., `--features=\"bar/a\"` +", + ) + .run(); + + p.cargo("build --bin=foo --features bar/a").run(); + assert!(p.bin("foo").is_file()); + + // example + p.cargo("build --example=foo") + .with_status(101) + .with_stderr( + "\ +error: target `foo` in package `foo` requires the features: `bar/a` +Consider enabling them by passing, e.g., `--features=\"bar/a\"` +", + ) + .run(); + + p.cargo("build --example=foo --features bar/a").run(); + assert!(p.bin("examples/foo").is_file()); + + // test + p.cargo("test") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .with_stdout("") + .run(); + + p.cargo("test --test=foo --features bar/a") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test test ... ok") + .run(); + + // bench + if is_nightly() { + p.cargo("bench") + .with_stderr("[FINISHED] release [optimized] target(s) in [..]") + .with_stdout("") + .run(); + + p.cargo("bench --bench=foo --features bar/a") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test bench ... bench: [..]") + .run(); + } + + // install + p.cargo("install --path .") + .with_status(101) + .with_stderr( + "\ +[INSTALLING] foo v0.0.1 ([..]) +[FINISHED] release [optimized] target(s) in [..] 
+[ERROR] no binaries are available for install using the selected features +", + ) + .run(); + assert_has_not_installed_exe(cargo_home(), "foo"); + + p.cargo("install --features bar/a").run(); + assert_has_installed_exe(cargo_home(), "foo"); + p.cargo("uninstall foo").run(); +} + +#[cargo_test] +fn test_skips_compiling_bin_with_missing_required_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + a = [] + + [[bin]] + name = "bin_foo" + path = "src/bin/foo.rs" + required-features = ["a"] + "#, + ) + .file("src/bin/foo.rs", "extern crate bar; fn main() {}") + .file("tests/foo.rs", "") + .file("benches/foo.rs", "") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .run(); + + p.cargo("test --features a -j 1") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +error[E0463]: can't find crate for `bar`", + ) + .run(); + + if is_nightly() { + p.cargo("bench") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] target/release/deps/foo-[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .run(); + + p.cargo("bench --features a -j 1") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +error[E0463]: can't find crate for `bar`", + ) + .run(); + } +} + +#[cargo_test] +fn run_default() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = [] + a = [] + + [[bin]] + name = "foo" + required-features = ["a"] + "#, + ) + .file("src/lib.rs", "") + .file("src/main.rs", "extern crate foo; fn main() {}") + .build(); + + p.cargo("run") + .with_status(101) + .with_stderr( + "\ +error: target `foo` in package `foo` requires the features: `a` +Consider enabling them by passing, e.g., `--features=\"a\"` +", + ) + .run(); + + p.cargo("run --features a").run(); +} + +#[cargo_test] +fn run_default_multiple_required_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + default = ["a"] + a = [] + b = [] + + [[bin]] + name = "foo1" + path = "src/foo1.rs" + required-features = ["a"] + + [[bin]] + name = "foo2" + path = "src/foo2.rs" + required-features = ["b"] + "#, + ) + .file("src/lib.rs", "") + .file("src/foo1.rs", "extern crate foo; fn main() {}") + .file("src/foo2.rs", "extern crate foo; fn main() {}") + .build(); + + p.cargo("run") + .with_status(101) + .with_stderr( + "\ +error: `cargo run` could not determine which binary to run[..] +available binaries: foo1, foo2", + ) + .run(); +} diff --git a/tests/testsuite/resolve.rs b/tests/testsuite/resolve.rs new file mode 100644 index 00000000000..ae428d9a99f --- /dev/null +++ b/tests/testsuite/resolve.rs @@ -0,0 +1,34 @@ +use crate::support::project; +use crate::support::registry::Package; + +// Ensure that the "-Z minimal-versions" CLI option works and the minimal +// version of a dependency ends up in the lock file. 
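+// With `-Z minimal-versions` the resolver picks the lowest version that
+// satisfies each requirement instead of the highest, so the `dep = "1.0"`
+// requirement below should lock to 1.0.0 even though 1.1.0 is published.
+// This is mainly useful for checking that declared lower bounds are honest.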
+#[cargo_test] +fn minimal_version_cli() { + Package::new("dep", "1.0.0").publish(); + Package::new("dep", "1.1.0").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.0.1" + + [dependencies] + dep = "1.0" + "#, + ) + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("generate-lockfile -Zminimal-versions") + .masquerade_as_nightly_cargo() + .run(); + + let lock = p.read_lockfile(); + + assert!(lock.contains("dep 1.0.0")); +} diff --git a/tests/testsuite/run.rs b/tests/testsuite/run.rs new file mode 100644 index 00000000000..04afa8fc244 --- /dev/null +++ b/tests/testsuite/run.rs @@ -0,0 +1,1161 @@ +use crate::support::{basic_bin_manifest, basic_lib_manifest, project, Project}; +use cargo::util::paths::dylib_path_envvar; + +#[cargo_test] +fn simple() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + + p.cargo("run") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/foo[EXE]`", + ) + .with_stdout("hello") + .run(); + assert!(p.bin("foo").is_file()); +} + +#[cargo_test] +fn simple_quiet() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + + p.cargo("run -q").with_stdout("hello").run(); + + p.cargo("run --quiet").with_stdout("hello").run(); +} + +#[cargo_test] +fn simple_quiet_and_verbose() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + + p.cargo("run -q -v") + .with_status(101) + .with_stderr("[ERROR] cannot set both --verbose and --quiet") + .run(); +} + +#[cargo_test] +fn quiet_and_verbose_config() { + let p = project() + .file( + ".cargo/config", + r#" + [term] + verbose = true + "#, + ) + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + + p.cargo("run -q").run(); +} + +#[cargo_test] +fn simple_with_args() { + let p = project() + .file( + "src/main.rs", + r#" + fn main() { + assert_eq!(std::env::args().nth(1).unwrap(), "hello"); + assert_eq!(std::env::args().nth(2).unwrap(), "world"); + } + "#, + ) + .build(); + + p.cargo("run hello world").run(); +} + +#[cfg(unix)] +#[cargo_test] +fn simple_with_non_utf8_args() { + use std::os::unix::ffi::OsStrExt; + + let p = project() + .file( + "src/main.rs", + r#" + use std::ffi::OsStr; + use std::os::unix::ffi::OsStrExt; + + fn main() { + assert_eq!(std::env::args_os().nth(1).unwrap(), OsStr::from_bytes(b"hello")); + assert_eq!(std::env::args_os().nth(2).unwrap(), OsStr::from_bytes(b"ab\xffcd")); + } + "#, + ) + .build(); + + p.cargo("run") + .arg("hello") + .arg(std::ffi::OsStr::from_bytes(b"ab\xFFcd")) + .run(); +} + +#[cargo_test] +fn exit_code() { + let p = project() + .file("src/main.rs", "fn main() { std::process::exit(2); }") + .build(); + + let mut output = String::from( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target[..]` +", + ); + if !cfg!(unix) { + output.push_str( + "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)", + ); + } + p.cargo("run").with_status(2).with_stderr(output).run(); +} + +#[cargo_test] +fn exit_code_verbose() { + let p = project() + .file("src/main.rs", "fn main() { std::process::exit(2); }") + .build(); + + let mut output = String::from( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target[..]` +", + ); + if !cfg!(unix) { + output.push_str( + "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit code: 2)", + ); + } + + p.cargo("run -v").with_status(2).with_stderr(output).run(); +} + +#[cargo_test] +fn no_main_file() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("run") + .with_status(101) + .with_stderr( + "[ERROR] a bin target must be available \ + for `cargo run`\n", + ) + .run(); +} + +#[cargo_test] +fn too_many_bins() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "") + .file("src/bin/b.rs", "") + .build(); + + // Using [..] here because the order is not stable + p.cargo("run") + .with_status(101) + .with_stderr( + "[ERROR] `cargo run` could not determine which binary to run. \ + Use the `--bin` option to specify a binary, or the \ + `default-run` manifest key.\ + \navailable binaries: [..]\n", + ) + .run(); +} + +#[cargo_test] +fn specify_name() { + let p = project() + .file("src/lib.rs", "") + .file( + "src/bin/a.rs", + r#" + #[allow(unused_extern_crates)] + extern crate foo; + fn main() { println!("hello a.rs"); } + "#, + ) + .file( + "src/bin/b.rs", + r#" + #[allow(unused_extern_crates)] + extern crate foo; + fn main() { println!("hello b.rs"); } + "#, + ) + .build(); + + p.cargo("run --bin a -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc [..] src/lib.rs [..]` +[RUNNING] `rustc [..] src/bin/a.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/a[EXE]`", + ) + .with_stdout("hello a.rs") + .run(); + + p.cargo("run --bin b -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] src/bin/b.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/b[EXE]`", + ) + .with_stdout("hello b.rs") + .run(); +} + +#[cargo_test] +fn specify_default_run() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + default-run = "a" + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) + .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#) + .build(); + + p.cargo("run").with_stdout("hello A").run(); + p.cargo("run --bin a").with_stdout("hello A").run(); + p.cargo("run --bin b").with_stdout("hello B").run(); +} + +#[cargo_test] +fn bogus_default_run() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + default-run = "b" + "#, + ) + .file("src/lib.rs", "") + .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) + .build(); + + p.cargo("run") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` + +Caused by: + default-run target `b` not found + +Did you mean `a`? +", + ) + .run(); +} + +#[cargo_test] +fn run_example() { + let p = project() + .file("src/lib.rs", "") + .file("examples/a.rs", r#"fn main() { println!("example"); }"#) + .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#) + .build(); + + p.cargo("run --example a") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target/debug/examples/a[EXE]`",
+        )
+        .with_stdout("example")
+        .run();
+}
+
+#[cargo_test]
+fn run_library_example() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            [[example]]
+            name = "bar"
+            crate_type = ["lib"]
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file("examples/bar.rs", "fn foo() {}")
+        .build();
+
+    p.cargo("run --example bar")
+        .with_status(101)
+        .with_stderr("[ERROR] example target `bar` is a library and cannot be executed")
+        .run();
+}
+
+#[cargo_test]
+fn run_bin_example() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            [[example]]
+            name = "bar"
+            crate_type = ["bin"]
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file("examples/bar.rs", r#"fn main() { println!("example"); }"#)
+        .build();
+
+    p.cargo("run --example bar")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/bar[EXE]`",
+        )
+        .with_stdout("example")
+        .run();
+}
+
+fn autodiscover_examples_project(rust_edition: &str, autoexamples: Option<bool>) -> Project {
+    let autoexamples = match autoexamples {
+        None => "".to_string(),
+        Some(bool) => format!("autoexamples = {}", bool),
+    };
+    project()
+        .file(
+            "Cargo.toml",
+            &format!(
+                r#"
+                [project]
+                name = "foo"
+                version = "0.0.1"
+                authors = []
+                edition = "{rust_edition}"
+                {autoexamples}
+
+                [features]
+                magic = []
+
+                [[example]]
+                name = "do_magic"
+                required-features = ["magic"]
+                "#,
+                rust_edition = rust_edition,
+                autoexamples = autoexamples
+            ),
+        )
+        .file("examples/a.rs", r#"fn main() { println!("example"); }"#)
+        .file(
+            "examples/do_magic.rs",
+            r#"
+            fn main() { println!("magic example"); }
+            "#,
+        )
+        .build()
+}
+
+#[cargo_test]
+fn run_example_autodiscover_2015() {
+    let p = autodiscover_examples_project("2015", None);
+    p.cargo("run --example a")
+        .with_status(101)
+        .with_stderr(
+            "warning: \
+An explicit [[example]] section is specified in Cargo.toml which currently
+disables Cargo from automatically inferring other example targets.
+This inference behavior will change in the Rust 2018 edition and the following
+files will be included as a example target:
+
+* [..]a.rs
+
+This is likely to break cargo build or cargo test as these files may not be
+ready to be compiled as a example target today. You can future-proof yourself
+and disable this warning by adding `autoexamples = false` to your [package]
+section. You may also move the files to a location where Cargo would not
+automatically infer them to be a target, such as in subfolders.
+
+For more information on this warning you can consult
+https://github.com/rust-lang/cargo/issues/5330
+error: no example target named `a`
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn run_example_autodiscover_2015_with_autoexamples_enabled() {
+    let p = autodiscover_examples_project("2015", Some(true));
+    p.cargo("run --example a")
+        .with_stderr(
+            "\
+[COMPILING] foo v0.0.1 ([CWD])
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+[RUNNING] `target/debug/examples/a[EXE]`", + ) + .with_stdout("example") + .run(); +} + +#[cargo_test] +fn run_example_autodiscover_2015_with_autoexamples_disabled() { + let p = autodiscover_examples_project("2015", Some(false)); + p.cargo("run --example a") + .with_status(101) + .with_stderr("error: no example target named `a`\n") + .run(); +} + +#[cargo_test] +fn run_example_autodiscover_2018() { + let p = autodiscover_examples_project("2018", None); + p.cargo("run --example a") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/examples/a[EXE]`", + ) + .with_stdout("example") + .run(); +} + +#[cargo_test] +fn autobins_disables() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + autobins = false + "#, + ) + .file("src/lib.rs", "pub mod bin;") + .file("src/bin/mod.rs", "// empty") + .build(); + + p.cargo("run") + .with_status(101) + .with_stderr("[ERROR] a bin target must be available for `cargo run`") + .run(); +} + +#[cargo_test] +fn run_bins() { + let p = project() + .file("src/lib.rs", "") + .file("examples/a.rs", r#"fn main() { println!("example"); }"#) + .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#) + .build(); + + p.cargo("run --bins") + .with_status(1) + .with_stderr_contains( + "error: Found argument '--bins' which wasn't expected, or isn't valid in this context", + ) + .run(); +} + +#[cargo_test] +fn run_with_filename() { + let p = project() + .file("src/lib.rs", "") + .file( + "src/bin/a.rs", + r#" + extern crate foo; + fn main() { println!("hello a.rs"); } + "#, + ) + .file("examples/a.rs", r#"fn main() { println!("example"); }"#) + .build(); + + p.cargo("run --bin bin.rs") + .with_status(101) + .with_stderr("[ERROR] no bin target named `bin.rs`") + .run(); + + p.cargo("run --bin a.rs") + .with_status(101) + .with_stderr( + "\ +[ERROR] no bin target named `a.rs` + +Did you mean `a`?", + ) + .run(); + + p.cargo("run --example example.rs") + .with_status(101) + .with_stderr("[ERROR] no example target named `example.rs`") + .run(); + + p.cargo("run --example a.rs") + .with_status(101) + .with_stderr( + "\ +[ERROR] no example target named `a.rs` + +Did you mean `a`?", + ) + .run(); +} + +#[cargo_test] +fn either_name_or_example() { + let p = project() + .file("src/bin/a.rs", r#"fn main() { println!("hello a.rs"); }"#) + .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#) + .build(); + + p.cargo("run --bin a --example b") + .with_status(101) + .with_stderr( + "[ERROR] `cargo run` can run at most one \ + executable, but multiple were \ + specified", + ) + .run(); +} + +#[cargo_test] +fn one_bin_multiple_examples() { + let p = project() + .file("src/lib.rs", "") + .file( + "src/bin/main.rs", + r#"fn main() { println!("hello main.rs"); }"#, + ) + .file("examples/a.rs", r#"fn main() { println!("hello a.rs"); }"#) + .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#) + .build(); + + p.cargo("run") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `target/debug/main[EXE]`", + ) + .with_stdout("hello main.rs") + .run(); +} + +#[cargo_test] +fn example_with_release_flag() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + version = "*" + path = "bar" + "#, + ) + .file( + "examples/a.rs", + r#" + extern crate bar; + + fn main() { + if cfg!(debug_assertions) { + println!("slow1") + } else { + println!("fast1") + } + bar::baz(); + } + "#, + ) + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file( + "bar/src/bar.rs", + r#" + pub fn baz() { + if cfg!(debug_assertions) { + println!("slow2") + } else { + println!("fast2") + } + } + "#, + ) + .build(); + + p.cargo("run -v --release --example a") + .with_stderr( + "\ +[COMPILING] bar v0.5.0 ([CWD]/bar) +[RUNNING] `rustc --crate-name bar bar/src/bar.rs --color never --crate-type lib \ + --emit=[..]link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [CWD]/target/release/deps \ + -L dependency=[CWD]/target/release/deps` +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name a examples/a.rs --color never --crate-type bin \ + --emit=[..]link \ + -C opt-level=3 \ + -C metadata=[..] \ + --out-dir [CWD]/target/release/examples \ + -L dependency=[CWD]/target/release/deps \ + --extern bar=[CWD]/target/release/deps/libbar-[..].rlib` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `target/release/examples/a[EXE]` +", + ) + .with_stdout( + "\ +fast1 +fast2", + ) + .run(); + + p.cargo("run -v --example a") + .with_stderr( + "\ +[COMPILING] bar v0.5.0 ([CWD]/bar) +[RUNNING] `rustc --crate-name bar bar/src/bar.rs --color never --crate-type lib \ + --emit=[..]link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [CWD]/target/debug/deps \ + -L dependency=[CWD]/target/debug/deps` +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name a examples/a.rs --color never --crate-type bin \ + --emit=[..]link \ + -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [CWD]/target/debug/examples \ + -L dependency=[CWD]/target/debug/deps \ + --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `target/debug/examples/a[EXE]` +", + ) + .with_stdout( + "\ +slow1 +slow2", + ) + .run(); +} + +#[cargo_test] +fn run_dylib_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/main.rs", + r#"extern crate bar; fn main() { bar::bar(); }"#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate-type = ["dylib"] + "#, + ) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("run hello world").run(); +} + +#[cargo_test] +fn release_works() { + let p = project() + .file( + "src/main.rs", + r#" + fn main() { if cfg!(debug_assertions) { panic!() } } + "#, + ) + .build(); + + p.cargo("run --release") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] release [optimized] target(s) in [..] 
+[RUNNING] `target/release/foo[EXE]`
+",
+        )
+        .run();
+    assert!(p.release_bin("foo").is_file());
+}
+
+#[cargo_test]
+fn run_bin_different_name() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "bar"
+            "#,
+        )
+        .file("src/bar.rs", "fn main() {}")
+        .build();
+
+    p.cargo("run").run();
+}
+
+#[cargo_test]
+fn dashes_are_forwarded() {
+    let p = project()
+        .file(
+            "src/bin/bar.rs",
+            r#"
+            fn main() {
+                let s: Vec<String> = std::env::args().collect();
+                assert_eq!(s[1], "--");
+                assert_eq!(s[2], "a");
+                assert_eq!(s[3], "--");
+                assert_eq!(s[4], "b");
+            }
+            "#,
+        )
+        .build();
+
+    p.cargo("run -- -- a -- b").run();
+}
+
+#[cargo_test]
+fn run_from_executable_folder() {
+    let p = project()
+        .file("src/main.rs", r#"fn main() { println!("hello"); }"#)
+        .build();
+
+    let cwd = p.root().join("target").join("debug");
+    p.cargo("build").run();
+
+    p.cargo("run")
+        .cwd(cwd)
+        .with_stderr(
+            "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\
+             [RUNNING] `./foo[EXE]`",
+        )
+        .with_stdout("hello")
+        .run();
+}
+
+#[cargo_test]
+fn run_with_library_paths() {
+    let p = project();
+
+    // Only link search directories within the target output directory are
+    // propagated through to dylib_path_envvar() (see #3366).
+    let mut dir1 = p.target_debug_dir();
+    dir1.push("foo\\backslash");
+
+    let mut dir2 = p.target_debug_dir();
+    dir2.push("dir=containing=equal=signs");
+
+    let p = p
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+            "#,
+        )
+        .file(
+            "build.rs",
+            &format!(
+                r##"
+                fn main() {{
+                    println!(r#"cargo:rustc-link-search=native={}"#);
+                    println!(r#"cargo:rustc-link-search={}"#);
+                }}
+                "##,
+                dir1.display(),
+                dir2.display()
+            ),
+        )
+        .file(
+            "src/main.rs",
+            &format!(
+                r##"
+                fn main() {{
+                    let search_path = std::env::var_os("{}").unwrap();
+                    let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+                    assert!(paths.contains(&r#"{}"#.into()));
+                    assert!(paths.contains(&r#"{}"#.into()));
+                }}
+                "##,
+                dylib_path_envvar(),
+                dir1.display(),
+                dir2.display()
+            ),
+        )
+        .build();
+
+    p.cargo("run").run();
+}
+
+#[cargo_test]
+fn library_paths_sorted_alphabetically() {
+    let p = project();
+
+    let mut dir1 = p.target_debug_dir();
+    dir1.push("zzzzzzz");
+
+    let mut dir2 = p.target_debug_dir();
+    dir2.push("BBBBBBB");
+
+    let mut dir3 = p.target_debug_dir();
+    dir3.push("aaaaaaa");
+
+    let p = p
+        .file(
+            "Cargo.toml",
+            r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            build = "build.rs"
+            "#,
+        )
+        .file(
+            "build.rs",
+            &format!(
+                r##"
+                fn main() {{
+                    println!(r#"cargo:rustc-link-search=native={}"#);
+                    println!(r#"cargo:rustc-link-search=native={}"#);
+                    println!(r#"cargo:rustc-link-search=native={}"#);
+                }}
+                "##,
+                dir1.display(),
+                dir2.display(),
+                dir3.display()
+            ),
+        )
+        .file(
+            "src/main.rs",
+            &format!(
+                r##"
+                fn main() {{
+                    let search_path = std::env::var_os("{}").unwrap();
+                    let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
+                    // ASCII case-sensitive sort
+                    assert_eq!("BBBBBBB", paths[0].file_name().unwrap().to_string_lossy());
+                    assert_eq!("aaaaaaa", paths[1].file_name().unwrap().to_string_lossy());
+                    assert_eq!("zzzzzzz", paths[2].file_name().unwrap().to_string_lossy());
+                }}
+                "##,
+                dylib_path_envvar()
+            ),
+        )
+        .build();
+
+    p.cargo("run").run();
+}
+
+#[cargo_test]
+fn fail_no_extra_verbose() {
+    let p = project()
+        .file("src/main.rs", "fn main() { std::process::exit(1); }")
+        .build();
+
+    p.cargo("run -q")
+        .with_status(1)
+        .with_stdout("")
+        .with_stderr("")
+        .run();
+}
+
+#[cargo_test]
+fn run_multiple_packages() {
+    let p = project()
+        .no_manifest()
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [workspace]
+
+            [dependencies]
+            d1 = { path = "d1" }
+            d2 = { path = "d2" }
+            d3 = { path = "../d3" } # outside of the workspace
+
+            [[bin]]
+            name = "foo"
+            "#,
+        )
+        .file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }")
+        .file("foo/d1/Cargo.toml", &basic_bin_manifest("d1"))
+        .file("foo/d1/src/lib.rs", "")
+        .file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file("foo/d2/Cargo.toml", &basic_bin_manifest("d2"))
+        .file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }")
+        .file("d3/Cargo.toml", &basic_bin_manifest("d3"))
+        .file("d3/src/main.rs", "fn main() { println!(\"d2\"); }")
+        .build();
+
+    let cargo = || {
+        let mut process_builder = p.cargo("run");
+        process_builder.cwd("foo");
+        process_builder
+    };
+
+    cargo().arg("-p").arg("d1").with_stdout("d1").run();
+
+    cargo()
+        .arg("-p")
+        .arg("d2")
+        .arg("--bin")
+        .arg("d2")
+        .with_stdout("d2")
+        .run();
+
+    cargo().with_stdout("foo").run();
+
+    cargo().arg("-p").arg("d1").arg("-p").arg("d2")
+        .with_status(1)
+        .with_stderr_contains("error: The argument '--package <SPEC>' was provided more than once, but cannot be used multiple times").run();
+
+    cargo()
+        .arg("-p")
+        .arg("d3")
+        .with_status(101)
+        .with_stderr_contains("[ERROR] package `d3` is not a member of the workspace")
+        .run();
+}
+
+#[cargo_test]
+fn explicit_bin_with_args() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                assert_eq!(std::env::args().nth(1).unwrap(), "hello");
+                assert_eq!(std::env::args().nth(2).unwrap(), "world");
+            }
+            "#,
+        )
+        .build();
+
+    p.cargo("run --bin foo hello world").run();
+}
+
+#[cargo_test]
+fn run_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+            "#,
+        )
+        .file("a/Cargo.toml", &basic_bin_manifest("a"))
+        .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#)
+        .file("b/Cargo.toml", &basic_bin_manifest("b"))
+        .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#)
+        .build();
+
+    p.cargo("run")
+        .with_status(101)
+        .with_stderr(
+            "\
+[ERROR] `cargo run` could not determine which binary to run[..]
+available binaries: a, b",
+        )
+        .run();
+    p.cargo("run --bin a").with_stdout("run-a").run();
+}
+
+#[cargo_test]
+fn default_run_workspace() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [workspace]
+            members = ["a", "b"]
+            "#,
+        )
+        .file(
+            "a/Cargo.toml",
+            r#"
+            [project]
+            name = "a"
+            version = "0.0.1"
+            default-run = "a"
+            "#,
+        )
+        .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#)
+        .file("b/Cargo.toml", &basic_bin_manifest("b"))
+        .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#)
+        .build();
+
+    p.cargo("run").with_stdout("run-a").run();
+}
+
+#[cargo_test]
+#[cfg(target_os = "macos")]
+fn run_link_system_path_macos() {
+    use crate::support::paths::{self, CargoPathExt};
+    use std::fs;
+    // Check that the default system library path is honored.
+    // First, build a shared library that will be accessed from
+    // DYLD_FALLBACK_LIBRARY_PATH.
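+    // The first project below builds `libfoo.dylib` (crate-type ["cdylib"]);
+    // a second project then links it via `cargo:rustc-link-lib=foo` and an
+    // explicit search path before the dylib is moved into ~/lib.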
+ let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + [lib] + crate-type = ["cdylib"] + "#, + ) + .file( + "src/lib.rs", + "#[no_mangle] pub extern fn something_shared() {}", + ) + .build(); + p.cargo("build").run(); + + // This is convoluted. Since this test can't modify things in /usr, + // this needs to dance around to check that things work. + // + // The default DYLD_FALLBACK_LIBRARY_PATH is: + // $(HOME)/lib:/usr/local/lib:/lib:/usr/lib + // + // This will make use of ~/lib in the path, but the default cc link + // path is /usr/lib:/usr/local/lib. So first need to build in one + // location, and then move it to ~/lib. + // + // 1. Build with rustc-link-search pointing to libfoo so the initial + // binary can be linked. + // 2. Move the library to ~/lib + // 3. Run `cargo run` to make sure it can still find the library in + // ~/lib. + // + // This should be equivalent to having the library in /usr/local/lib. + let p2 = project() + .at("bar") + .file("Cargo.toml", &basic_bin_manifest("bar")) + .file( + "src/main.rs", + r#" + extern { + fn something_shared(); + } + fn main() { + unsafe { something_shared(); } + } + "#, + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + println!("cargo:rustc-link-lib=foo"); + println!("cargo:rustc-link-search={}"); + }} + "#, + p.target_debug_dir().display() + ), + ) + .build(); + p2.cargo("build").run(); + p2.cargo("test").run(); + + let libdir = paths::home().join("lib"); + fs::create_dir(&libdir).unwrap(); + fs::rename( + p.target_debug_dir().join("libfoo.dylib"), + libdir.join("libfoo.dylib"), + ) + .unwrap(); + p.root().rm_rf(); + const VAR: &str = "DYLD_FALLBACK_LIBRARY_PATH"; + // Reset DYLD_FALLBACK_LIBRARY_PATH so that we don't inherit anything that + // was set by the cargo that invoked the test. + p2.cargo("run").env_remove(VAR).run(); + p2.cargo("test").env_remove(VAR).run(); + // Ensure this still works when DYLD_FALLBACK_LIBRARY_PATH has + // a value set. + p2.cargo("run").env(VAR, &libdir).run(); + p2.cargo("test").env(VAR, &libdir).run(); +} diff --git a/tests/testsuite/rustc.rs b/tests/testsuite/rustc.rs new file mode 100644 index 00000000000..9fb3ad240cc --- /dev/null +++ b/tests/testsuite/rustc.rs @@ -0,0 +1,437 @@ +use crate::support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project}; + +const CARGO_RUSTC_ERROR: &str = + "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering +the package by passing, e.g., `--lib` or `--bin NAME` to specify a single target"; + +#[cargo_test] +fn build_lib_for_foo() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustc --lib -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn lib() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustc --lib -v -- -C debug-assertions=off") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 \ + -C debug-assertions=off \ + -C metadata=[..] \ + --out-dir [..] 
\ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn build_main_and_allow_unstable_options() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustc -v --bin foo -- -C debug-assertions") + .with_stderr(format!( + "\ +[COMPILING] {name} v{version} ([CWD]) +[RUNNING] `rustc --crate-name {name} src/lib.rs --color never --crate-type lib \ + --emit=[..]link -C debuginfo=2 \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps` +[RUNNING] `rustc --crate-name {name} src/main.rs --color never --crate-type bin \ + --emit=[..]link -C debuginfo=2 \ + -C debug-assertions \ + -C metadata=[..] \ + --out-dir [..] \ + -L dependency=[CWD]/target/debug/deps \ + --extern {name}=[CWD]/target/debug/deps/lib{name}-[..].rlib` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + name = "foo", + version = "0.0.1" + )) + .run(); +} + +#[cargo_test] +fn fails_when_trying_to_build_main_and_lib_with_args() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustc -v -- -C debug-assertions") + .with_status(101) + .with_stderr(CARGO_RUSTC_ERROR) + .run(); +} + +#[cargo_test] +fn build_with_args_to_one_of_multiple_binaries() { + let p = project() + .file("src/bin/foo.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .file("src/bin/baz.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustc -v --bin bar -- -C debug-assertions") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib --emit=[..]link \ + -C debuginfo=2 -C metadata=[..] \ + --out-dir [..]` +[RUNNING] `rustc --crate-name bar src/bin/bar.rs --color never --crate-type bin --emit=[..]link \ + -C debuginfo=2 -C debug-assertions [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn fails_with_args_to_all_binaries() { + let p = project() + .file("src/bin/foo.rs", "fn main() {}") + .file("src/bin/bar.rs", "fn main() {}") + .file("src/bin/baz.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustc -v -- -C debug-assertions") + .with_status(101) + .with_stderr(CARGO_RUSTC_ERROR) + .run(); +} + +#[cargo_test] +fn build_with_args_to_one_of_multiple_tests() { + let p = project() + .file("tests/foo.rs", r#" "#) + .file("tests/bar.rs", r#" "#) + .file("tests/baz.rs", r#" "#) + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustc -v --test bar -- -C debug-assertions") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib --emit=[..]link \ + -C debuginfo=2 -C metadata=[..] \ + --out-dir [..]` +[RUNNING] `rustc --crate-name bar tests/bar.rs --color never --emit=[..]link -C debuginfo=2 \ + -C debug-assertions [..]--test[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn build_foo_with_bar_dependency() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+            "#,
+        )
+        .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc -v -- -C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `[..] -C debuginfo=2 [..]`
+[COMPILING] foo v0.0.1 ([CWD])
+[RUNNING] `[..] -C debuginfo=2 -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn build_only_bar_dependency() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+            "#,
+        )
+        .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("src/lib.rs", "pub fn baz() {}")
+        .build();
+
+    foo.cargo("rustc -v -p bar -- -C debug-assertions")
+        .with_stderr(
+            "\
+[COMPILING] bar v0.1.0 ([..])
+[RUNNING] `rustc --crate-name bar [..] --color never --crate-type lib [..] -C debug-assertions [..]`
+[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn targets_selected_default() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("rustc -v")
+        // bin
+        .with_stderr_contains(
+            "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+             --emit=[..]link[..]",
+        )
+        // bench
+        .with_stderr_does_not_contain(
+            "[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=[..]link \
+             -C opt-level=3 --test [..]",
+        )
+        // unit test
+        .with_stderr_does_not_contain(
+            "[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=[..]link \
+             -C debuginfo=2 --test [..]",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn targets_selected_all() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    p.cargo("rustc -v --all-targets")
+        // bin
+        .with_stderr_contains(
+            "[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
+             --emit=[..]link[..]",
+        )
+        // unit test
+        .with_stderr_contains(
+            "[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=[..]link \
+             -C debuginfo=2 --test [..]",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn fail_with_multiple_packages() {
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+
+            [dependencies.baz]
+            path = "../baz"
+            "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .build();
+
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if cfg!(flag = "1") { println!("Yeah from bar!"); }
+            }
+            "#,
+        )
+        .build();
+
+    let _baz = project()
+        .at("baz")
+        .file("Cargo.toml", &basic_manifest("baz", "0.1.0"))
+        .file(
+            "src/main.rs",
+            r#"
+            fn main() {
+                if cfg!(flag = "1") { println!("Yeah from baz!"); }
+            }
+            "#,
+        )
+        .build();
+
+    foo.cargo("rustc -v -p bar -p baz")
+        .with_status(1)
+        .with_stderr_contains(
+            "\
+error: The argument '--package <SPEC>' was provided more than once, \
+             but cannot be used multiple times
+",
+        )
+        .run();
+}
+
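+// `cargo rustc --profile test` compiles the library under the test profile,
+// so `#[cfg(test)]` items and dev-dependencies (here the path dependency `a`)
+// are expected to resolve.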
+#[cargo_test] +fn rustc_with_other_profile() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + a = { path = "a" } + "#, + ) + .file( + "src/main.rs", + r#" + #[cfg(test)] extern crate a; + + #[test] + fn foo() {} + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("rustc --profile test").run(); +} + +#[cargo_test] +fn rustc_fingerprint() { + // Verify that the fingerprint includes the rustc args. + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "") + .build(); + + p.cargo("rustc -v -- -C debug-assertions") + .with_stderr( + "\ +[COMPILING] foo [..] +[RUNNING] `rustc [..]-C debug-assertions [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("rustc -v -- -C debug-assertions") + .with_stderr( + "\ +[FRESH] foo [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("rustc -v") + .with_stderr_does_not_contain("-C debug-assertions") + .with_stderr( + "\ +[COMPILING] foo [..] +[RUNNING] `rustc [..] +[FINISHED] [..] +", + ) + .run(); + + p.cargo("rustc -v") + .with_stderr( + "\ +[FRESH] foo [..] +[FINISHED] [..] +", + ) + .run(); +} + +#[cargo_test] +fn rustc_test_with_implicit_bin() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + #[cfg(foo)] + fn f() { compile_fail!("Foo shouldn't be set."); } + fn main() {} + "#, + ) + .file( + "tests/test1.rs", + r#" + #[cfg(not(foo))] + fn f() { compile_fail!("Foo should be set."); } "#, + ) + .build(); + + p.cargo("rustc --test test1 -v -- --cfg foo") + .with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name test1 tests/test1.rs [..] --cfg foo [..] +", + ) + .with_stderr_contains( + "\ +[RUNNING] `rustc --crate-name foo src/main.rs [..] +", + ) + .run(); +} diff --git a/tests/testsuite/rustc_info_cache.rs b/tests/testsuite/rustc_info_cache.rs new file mode 100644 index 00000000000..51dc4a42881 --- /dev/null +++ b/tests/testsuite/rustc_info_cache.rs @@ -0,0 +1,102 @@ +use crate::support::paths::CargoPathExt; +use crate::support::{basic_manifest, project}; +use std::env; + +#[cargo_test] +fn rustc_info_cache() { + let p = project() + .file("src/main.rs", r#"fn main() { println!("hello"); }"#) + .build(); + + let miss = "[..] 
rustc info cache miss[..]"; + let hit = "[..]rustc info cache hit[..]"; + let update = "[..]updated rustc info cache[..]"; + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .with_stderr_contains("[..]failed to read rustc info cache[..]") + .with_stderr_contains(miss) + .with_stderr_does_not_contain(hit) + .with_stderr_contains(update) + .run(); + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .with_stderr_contains("[..]reusing existing rustc info cache[..]") + .with_stderr_contains(hit) + .with_stderr_does_not_contain(miss) + .with_stderr_does_not_contain(update) + .run(); + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env("CARGO_CACHE_RUSTC_INFO", "0") + .with_stderr_contains("[..]rustc info cache disabled[..]") + .with_stderr_does_not_contain(update) + .run(); + + let other_rustc = { + let p = project() + .at("compiler") + .file("Cargo.toml", &basic_manifest("compiler", "0.1.0")) + .file( + "src/main.rs", + r#" + use std::process::Command; + use std::env; + + fn main() { + let mut cmd = Command::new("rustc"); + for arg in env::args_os().skip(1) { + cmd.arg(arg); + } + std::process::exit(cmd.status().unwrap().code().unwrap()); + } + "#, + ) + .build(); + p.cargo("build").run(); + + p.root() + .join("target/debug/compiler") + .with_extension(env::consts::EXE_EXTENSION) + }; + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env("RUSTC", other_rustc.display().to_string()) + .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") + .with_stderr_contains(miss) + .with_stderr_does_not_contain(hit) + .with_stderr_contains(update) + .run(); + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env("RUSTC", other_rustc.display().to_string()) + .with_stderr_contains("[..]reusing existing rustc info cache[..]") + .with_stderr_contains(hit) + .with_stderr_does_not_contain(miss) + .with_stderr_does_not_contain(update) + .run(); + + other_rustc.move_into_the_future(); + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env("RUSTC", other_rustc.display().to_string()) + .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") + .with_stderr_contains(miss) + .with_stderr_does_not_contain(hit) + .with_stderr_contains(update) + .run(); + + p.cargo("build") + .env("CARGO_LOG", "cargo::util::rustc=debug") + .env("RUSTC", other_rustc.display().to_string()) + .with_stderr_contains("[..]reusing existing rustc info cache[..]") + .with_stderr_contains(hit) + .with_stderr_does_not_contain(miss) + .with_stderr_does_not_contain(update) + .run(); +} diff --git a/tests/testsuite/rustdoc.rs b/tests/testsuite/rustdoc.rs new file mode 100644 index 00000000000..6525054444f --- /dev/null +++ b/tests/testsuite/rustdoc.rs @@ -0,0 +1,176 @@ +use crate::support::{basic_manifest, project}; + +#[cargo_test] +fn rustdoc_simple() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("rustdoc -v") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([CWD]) +[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ + -o [CWD]/target/doc \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn rustdoc_args() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("rustdoc -v -- --cfg=foo") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([CWD]) +[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ + -o [CWD]/target/doc \ + --cfg=foo \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn rustdoc_foo_with_bar_dependency() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "extern crate bar; pub fn foo() {}") + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("src/lib.rs", "pub fn baz() {}") + .build(); + + foo.cargo("rustdoc -v -- --cfg=foo") + .with_stderr( + "\ +[CHECKING] bar v0.0.1 ([..]) +[RUNNING] `rustc [..]bar/src/lib.rs [..]` +[DOCUMENTING] foo v0.0.1 ([CWD]) +[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ + -o [CWD]/target/doc \ + --cfg=foo \ + -L dependency=[CWD]/target/debug/deps \ + --extern [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn rustdoc_only_bar_dependency() { + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }") + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("src/lib.rs", "pub fn baz() {}") + .build(); + + foo.cargo("rustdoc -v -p bar -- --cfg=foo") + .with_stderr( + "\ +[DOCUMENTING] bar v0.0.1 ([..]) +[RUNNING] `rustdoc --crate-name bar [..]bar/src/lib.rs [..]\ + -o [CWD]/target/doc \ + --cfg=foo \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn rustdoc_same_name_documents_lib() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file("src/lib.rs", r#" "#) + .build(); + + p.cargo("rustdoc -v -- --cfg=foo") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([..]) +[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ + -o [CWD]/target/doc \ + --cfg=foo \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn features() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [features] + quux = [] + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("rustdoc --verbose --features quux") + .with_stderr_contains("[..]feature=[..]quux[..]") + .run(); +} + +#[cargo_test] +#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] +fn rustdoc_target() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("rustdoc --verbose --target x86_64-unknown-linux-gnu") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([..]) +[RUNNING] `rustdoc --crate-name foo src/lib.rs [..]\ + --target x86_64-unknown-linux-gnu \ + -o [CWD]/target/x86_64-unknown-linux-gnu/doc \ + -L dependency=[CWD]/target/x86_64-unknown-linux-gnu/debug/deps \ + -L dependency=[CWD]/target/debug/deps` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", + ) + .run(); +} diff --git a/tests/testsuite/rustdocflags.rs b/tests/testsuite/rustdocflags.rs new file mode 100644 index 00000000000..cc8a8730af9 --- /dev/null +++ b/tests/testsuite/rustdocflags.rs @@ -0,0 +1,97 @@ +use crate::support::project; + +#[cargo_test] +fn parses_env() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("doc -v") + .env("RUSTDOCFLAGS", "--cfg=foo") + .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg=foo[..]`") + .run(); +} + +#[cargo_test] +fn parses_config() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustdocflags = ["--cfg", "foo"] + "#, + ) + .build(); + + p.cargo("doc -v") + .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg foo[..]`") + .run(); +} + +#[cargo_test] +fn bad_flags() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("doc") + .env("RUSTDOCFLAGS", "--bogus") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); +} + +#[cargo_test] +fn rerun() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").run(); + p.cargo("doc") + .env("RUSTDOCFLAGS", "--cfg=foo") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + p.cargo("doc") + .env("RUSTDOCFLAGS", "--cfg=bar") + .with_stderr( + "\ +[DOCUMENTING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn rustdocflags_passed_to_rustdoc_through_cargo_test() { + let p = project() + .file( + "src/lib.rs", + r#" + //! ``` + //! assert!(cfg!(do_not_choke)); + //! ``` + "#, + ) + .build(); + + p.cargo("test --doc") + .env("RUSTDOCFLAGS", "--cfg do_not_choke") + .run(); +} + +#[cargo_test] +fn rustdocflags_passed_to_rustdoc_through_cargo_test_only_once() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("test --doc") + .env("RUSTDOCFLAGS", "--markdown-no-toc") + .run(); +} + +#[cargo_test] +fn rustdocflags_misspelled() { + let p = project().file("src/main.rs", "fn main() { }").build(); + + p.cargo("doc") + .env("RUSTDOC_FLAGS", "foo") + .with_stderr_contains("[WARNING] Cargo does not read `RUSTDOC_FLAGS` environment variable. 
Did you mean `RUSTDOCFLAGS`?")
+        .run();
+}
diff --git a/tests/testsuite/rustflags.rs b/tests/testsuite/rustflags.rs
new file mode 100644
index 00000000000..008bee0f45a
--- /dev/null
+++ b/tests/testsuite/rustflags.rs
@@ -0,0 +1,1395 @@
+use std::fs::{self, File};
+use std::io::Write;
+
+use crate::support::rustc_host;
+use crate::support::{basic_lib_manifest, basic_manifest, paths, project, project_in_home};
+
+#[cargo_test]
+fn env_rustflags_normal_source() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            "benches/d.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        )
+        .build();
+
+    // Use RUSTFLAGS to pass an argument that will generate an error
+    p.cargo("build --lib")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("build --bin=a")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("build --example=b")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("test")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("bench")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+}
+
+#[cargo_test]
+fn env_rustflags_build_script() {
+    // RUSTFLAGS should be passed to rustc for build scripts
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "build.rs",
+            r#"
+            fn main() { }
+            #[cfg(not(foo))]
+            fn main() { }
+            "#,
+        )
+        .build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[cargo_test]
+fn env_rustflags_build_script_dep() {
+    // RUSTFLAGS should be passed to rustc for build scripts
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            build = "build.rs"
+
+            [build-dependencies.bar]
+            path = "../bar"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file("build.rs", "fn main() {}")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_manifest("bar", "0.0.1"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(not(foo))]
+            fn bar() { }
+            "#,
+        )
+        .build();
+
+    foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[cargo_test]
+fn env_rustflags_plugin() {
+    // RUSTFLAGS should be passed to rustc for plugins
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+            "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            fn main() { }
+            #[cfg(not(foo))]
+            fn main() { }
+            "#,
+        )
+        .build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+}
+
+#[cargo_test]
+fn env_rustflags_plugin_dep() {
+    // RUSTFLAGS should be passed to rustc for plugins
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
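+    // The failure mode these tests rely on: the dependency defines `fn bar()`
+    // twice and gates the second definition with `#[cfg(not(foo))]`, so the
+    // crate only compiles once `--cfg foo` actually reaches rustc.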
+ let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "fn foo() {}") + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_lib_manifest("bar")) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#, + ) + .build(); + + foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); +} + +#[cargo_test] +fn env_rustflags_normal_source_with_target() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + "benches/d.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .build(); + + let host = &rustc_host(); + + // Use RUSTFLAGS to pass an argument that will generate an error + p.cargo("build --lib --target") + .arg(host) + .env("RUSTFLAGS", "-Z bogus") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --bin=a --target") + .arg(host) + .env("RUSTFLAGS", "-Z bogus") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --example=b --target") + .arg(host) + .env("RUSTFLAGS", "-Z bogus") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("test --target") + .arg(host) + .env("RUSTFLAGS", "-Z bogus") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("bench --target") + .arg(host) + .env("RUSTFLAGS", "-Z bogus") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); +} + +#[cargo_test] +fn env_rustflags_build_script_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#, + ) + .build(); + + let host = rustc_host(); + p.cargo("build --target") + .arg(host) + .env("RUSTFLAGS", "--cfg foo") + .run(); +} + +#[cargo_test] +fn env_rustflags_build_script_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#, + ) + .build(); + + let host = rustc_host(); + foo.cargo("build --target") + .arg(host) + .env("RUSTFLAGS", "--cfg foo") + .run(); +} + +#[cargo_test] +fn env_rustflags_plugin_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
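+    // Inverted gate: the duplicate `fn main` below is `#[cfg(foo)]`, so the
+    // build only succeeds if RUSTFLAGS does *not* leak into the host-compiled
+    // plugin while cross-compiling.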
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+            "#,
+        )
+        .file(
+            "src/lib.rs",
+            r#"
+            fn main() { }
+            #[cfg(foo)]
+            fn main() { }
+            "#,
+        )
+        .build();
+
+    let host = rustc_host();
+    p.cargo("build --target")
+        .arg(host)
+        .env("RUSTFLAGS", "--cfg foo")
+        .run();
+}
+
+#[cargo_test]
+fn env_rustflags_plugin_dep_with_target() {
+    // RUSTFLAGS should not be passed to rustc for plugins
+    // when --target is specified.
+    // In this test if --cfg foo is passed the build will fail.
+    let foo = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+
+            [lib]
+            name = "foo"
+            plugin = true
+
+            [dependencies.bar]
+            path = "../bar"
+            "#,
+        )
+        .file("src/lib.rs", "fn foo() {}")
+        .build();
+    let _bar = project()
+        .at("bar")
+        .file("Cargo.toml", &basic_lib_manifest("bar"))
+        .file(
+            "src/lib.rs",
+            r#"
+            fn bar() { }
+            #[cfg(foo)]
+            fn bar() { }
+            "#,
+        )
+        .build();
+
+    let host = rustc_host();
+    foo.cargo("build --target")
+        .arg(host)
+        .env("RUSTFLAGS", "--cfg foo")
+        .run();
+}
+
+#[cargo_test]
+fn env_rustflags_recompile() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").run();
+    // Setting RUSTFLAGS forces a recompile
+    p.cargo("build")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+}
+
+#[cargo_test]
+fn env_rustflags_recompile2() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+    // Setting RUSTFLAGS forces a recompile
+    p.cargo("build")
+        .env("RUSTFLAGS", "-Z bogus")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+}
+
+#[cargo_test]
+fn env_rustflags_no_recompile() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("build").env("RUSTFLAGS", "--cfg foo").run();
+    p.cargo("build")
+        .env("RUSTFLAGS", "--cfg foo")
+        .with_stdout("")
+        .run();
+}
+
+#[cargo_test]
+fn build_rustflags_normal_source() {
+    let p = project()
+        .file("src/lib.rs", "")
+        .file("src/bin/a.rs", "fn main() {}")
+        .file("examples/b.rs", "fn main() {}")
+        .file("tests/c.rs", "#[test] fn f() { }")
+        .file(
+            "benches/d.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+            #[bench] fn run1(_ben: &mut test::Bencher) { }"#,
+        )
+        .file(
+            ".cargo/config",
+            r#"
+            [build]
+            rustflags = ["-Z", "bogus"]
+            "#,
+        )
+        .build();
+
+    p.cargo("build --lib")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("build --bin=a")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("build --example=b")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("test")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+    p.cargo("bench")
+        .with_status(101)
+        .with_stderr_contains("[..]bogus[..]")
+        .run();
+}
+
+#[cargo_test]
+fn build_rustflags_build_script() {
+    // RUSTFLAGS should be passed to rustc for build scripts
+    // when --target is not specified.
+    // In this test if --cfg foo is not passed the build will fail.
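+    // Same duplicate-`fn main` gate as the env_rustflags tests above, but the
+    // flags now come from the `[build] rustflags` key in `.cargo/config`
+    // rather than from the RUSTFLAGS environment variable.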
+ let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { } + #[cfg(not(foo))] + fn main() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn build_rustflags_build_script_dep() { + // RUSTFLAGS should be passed to rustc for build scripts + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#, + ) + .build(); + + foo.cargo("build").run(); +} + +#[cargo_test] +fn build_rustflags_plugin() { + // RUSTFLAGS should be passed to rustc for plugins + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + fn main() { } + #[cfg(not(foo))] + fn main() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn build_rustflags_plugin_dep() { + // RUSTFLAGS should be passed to rustc for plugins + // when --target is not specified. + // In this test if --cfg foo is not passed the build will fail. 
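+    // Plugins and their dependencies are compiled for the host; without an
+    // explicit --target they still receive the configured `[build] rustflags`,
+    // which the `#[cfg(not(foo))]` gate in `bar` below verifies.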
+ let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "fn foo() {}") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_lib_manifest("bar")) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(not(foo))] + fn bar() { } + "#, + ) + .build(); + + foo.cargo("build").run(); +} + +#[cargo_test] +fn build_rustflags_normal_source_with_target() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + "benches/d.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["-Z", "bogus"] + "#, + ) + .build(); + + let host = &rustc_host(); + + // Use RUSTFLAGS to pass an argument that will generate an error + p.cargo("build --lib --target") + .arg(host) + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --bin=a --target") + .arg(host) + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --example=b --target") + .arg(host) + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("test --target") + .arg(host) + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("bench --target") + .arg(host) + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); +} + +#[cargo_test] +fn build_rustflags_build_script_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + let host = rustc_host(); + p.cargo("build --target").arg(host).run(); +} + +#[cargo_test] +fn build_rustflags_build_script_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for build scripts + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + + [build-dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#, + ) + .build(); + + let host = rustc_host(); + foo.cargo("build --target").arg(host).run(); +} + +#[cargo_test] +fn build_rustflags_plugin_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. 
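+    // With an explicit --target the plugin is a host unit and must be built
+    // without the configured rustflags; if `--cfg foo` leaked through, the
+    // `#[cfg(foo)]` duplicate `fn main` below would collide.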
+ let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + "#, + ) + .file( + "src/lib.rs", + r#" + fn main() { } + #[cfg(foo)] + fn main() { } + "#, + ) + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + let host = rustc_host(); + p.cargo("build --target").arg(host).run(); +} + +#[cargo_test] +fn build_rustflags_plugin_dep_with_target() { + // RUSTFLAGS should not be passed to rustc for plugins + // when --target is specified. + // In this test if --cfg foo is passed the build will fail. + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + name = "foo" + plugin = true + + [dependencies.bar] + path = "../bar" + "#, + ) + .file("src/lib.rs", "fn foo() {}") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + let _bar = project() + .at("bar") + .file("Cargo.toml", &basic_lib_manifest("bar")) + .file( + "src/lib.rs", + r#" + fn bar() { } + #[cfg(foo)] + fn bar() { } + "#, + ) + .build(); + + let host = rustc_host(); + foo.cargo("build --target").arg(host).run(); +} + +#[cargo_test] +fn build_rustflags_recompile() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("build").run(); + + // Setting RUSTFLAGS forces a recompile + let config = r#" + [build] + rustflags = ["-Z", "bogus"] + "#; + let config_file = paths::root().join("foo/.cargo/config"); + fs::create_dir_all(config_file.parent().unwrap()).unwrap(); + let mut config_file = File::create(config_file).unwrap(); + config_file.write_all(config.as_bytes()).unwrap(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); +} + +#[cargo_test] +fn build_rustflags_recompile2() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); + + // Setting RUSTFLAGS forces a recompile + let config = r#" + [build] + rustflags = ["-Z", "bogus"] + "#; + let config_file = paths::root().join("foo/.cargo/config"); + fs::create_dir_all(config_file.parent().unwrap()).unwrap(); + let mut config_file = File::create(config_file).unwrap(); + config_file.write_all(config.as_bytes()).unwrap(); + + p.cargo("build") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); +} + +#[cargo_test] +fn build_rustflags_no_recompile() { + let p = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); + p.cargo("build") + .env("RUSTFLAGS", "--cfg foo") + .with_stdout("") + .run(); +} + +#[cargo_test] +fn build_rustflags_with_home_config() { + // We need a config file inside the home directory + let home = paths::home(); + let home_config = home.join(".cargo"); + fs::create_dir(&home_config).unwrap(); + File::create(&home_config.join("config")) + .unwrap() + .write_all( + br#" + [build] + rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"] + "#, + ) + .unwrap(); + + // And we need the project to be inside the home directory + // so the walking process finds the home project twice. 
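+    // (Cargo discovers config files by walking up from the project
+    // directory; with the project under the home directory, `~/.cargo/config`
+    // is reachable both as an ancestor config and as the home config.)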
+ let p = project_in_home("foo").file("src/lib.rs", "").build(); + + p.cargo("build -v").run(); +} + +#[cargo_test] +fn target_rustflags_normal_source() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + "benches/d.rs", + r#" + #![feature(test)] + extern crate test; + #[bench] fn run1(_ben: &mut test::Bencher) { }"#, + ) + .file( + ".cargo/config", + &format!( + " + [target.{}] + rustflags = [\"-Z\", \"bogus\"] + ", + rustc_host() + ), + ) + .build(); + + p.cargo("build --lib") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --bin=a") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --example=b") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("test") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("bench") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); +} + +// target.{}.rustflags takes precedence over build.rustflags +#[cargo_test] +fn target_rustflags_precedence() { + let p = project() + .file("src/lib.rs", "") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + ".cargo/config", + &format!( + " + [build] + rustflags = [\"--cfg\", \"foo\"] + + [target.{}] + rustflags = [\"-Z\", \"bogus\"] + ", + rustc_host() + ), + ) + .build(); + + p.cargo("build --lib") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --bin=a") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("build --example=b") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("test") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); + p.cargo("bench") + .with_status(101) + .with_stderr_contains("[..]bogus[..]") + .run(); +} + +#[cargo_test] +fn cfg_rustflags_normal_source() { + let p = project() + .file("src/lib.rs", "pub fn t() {}") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + ".cargo/config", + &format!( + r#" + [target.'cfg({})'] + rustflags = ["--cfg", "bar"] + "#, + if rustc_host().contains("-windows-") { + "windows" + } else { + "not(windows)" + } + ), + ) + .build(); + + p.cargo("build --lib -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build --bin=a -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build --example=b -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("test --no-run -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("bench --no-run -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] 
--cfg bar[..]` +[FINISHED] release [optimized] target(s) in [..] +", + ) + .run(); +} + +// target.'cfg(...)'.rustflags takes precedence over build.rustflags +#[cargo_test] +fn cfg_rustflags_precedence() { + let p = project() + .file("src/lib.rs", "pub fn t() {}") + .file("src/bin/a.rs", "fn main() {}") + .file("examples/b.rs", "fn main() {}") + .file("tests/c.rs", "#[test] fn f() { }") + .file( + ".cargo/config", + &format!( + r#" + [build] + rustflags = ["--cfg", "foo"] + + [target.'cfg({})'] + rustflags = ["--cfg", "bar"] + "#, + if rustc_host().contains("-windows-") { + "windows" + } else { + "not(windows)" + } + ), + ) + .build(); + + p.cargo("build --lib -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build --bin=a -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build --example=b -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("test --no-run -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("bench --no-run -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[RUNNING] `rustc [..] --cfg bar[..]` +[FINISHED] release [optimized] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn target_rustflags_string_and_array_form1() { + let p1 = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustflags = ["--cfg", "foo"] + "#, + ) + .build(); + + p1.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + let p2 = project() + .file("src/lib.rs", "") + .file( + ".cargo/config", + r#" + [build] + rustflags = "--cfg foo" + "#, + ) + .build(); + + p2.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn target_rustflags_string_and_array_form2() { + let p1 = project() + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + rustflags = ["--cfg", "foo"] + "#, + rustc_host() + ), + ) + .file("src/lib.rs", "") + .build(); + + p1.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + let p2 = project() + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + rustflags = "--cfg foo" + "#, + rustc_host() + ), + ) + .file("src/lib.rs", "") + .build(); + + p2.cargo("build -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] --cfg foo[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn two_matching_in_config() { + let p1 = project() + .file( + ".cargo/config", + r#" + [target.'cfg(unix)'] + rustflags = ["--cfg", 'foo="a"'] + [target.'cfg(windows)'] + rustflags = ["--cfg", 'foo="a"'] + [target.'cfg(target_pointer_width = "32")'] + rustflags = ["--cfg", 'foo="b"'] + [target.'cfg(target_pointer_width = "64")'] + rustflags = ["--cfg", 'foo="b"'] + "#, + ) + .file( + "src/main.rs", + r#" + fn main() { + if cfg!(foo = "a") { + println!("a"); + } else if cfg!(foo = "b") { + println!("b"); + } else { + panic!() + } + } + "#, + ) + .build(); + + p1.cargo("run").run(); + p1.cargo("build").with_stderr("[FINISHED] [..]").run(); +} + +#[cargo_test] +fn env_rustflags_misspelled() { + let p = project().file("src/main.rs", "fn main() { }").build(); + + for cmd in &["check", "build", "run", "test", "bench"] { + p.cargo(cmd) + .env("RUST_FLAGS", "foo") + .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?") + .run(); + } +} + +#[cargo_test] +fn env_rustflags_misspelled_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + build = "build.rs" + "#, + ) + .file("src/lib.rs", "") + .file("build.rs", "fn main() { }") + .build(); + + p.cargo("build") + .env("RUST_FLAGS", "foo") + .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?") + .run(); +} + +#[cargo_test] +fn remap_path_prefix_ignored() { + // Ensure that --remap-path-prefix does not affect metadata hash. + let p = project().file("src/lib.rs", "").build(); + p.cargo("build").run(); + let rlibs = p + .glob("target/debug/deps/*.rlib") + .collect::, _>>() + .unwrap(); + assert_eq!(rlibs.len(), 1); + p.cargo("clean").run(); + + let check_metadata_same = || { + let rlibs2 = p + .glob("target/debug/deps/*.rlib") + .collect::, _>>() + .unwrap(); + assert_eq!(rlibs, rlibs2); + }; + + p.cargo("build") + .env( + "RUSTFLAGS", + "--remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo", + ) + .run(); + check_metadata_same(); + + p.cargo("clean").run(); + p.cargo("rustc -- --remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo") + .run(); + check_metadata_same(); +} diff --git a/tests/testsuite/search.rs b/tests/testsuite/search.rs new file mode 100644 index 00000000000..8cd0d7a107d --- /dev/null +++ b/tests/testsuite/search.rs @@ -0,0 +1,216 @@ +use std::collections::HashSet; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::Path; + +use crate::support::cargo_process; +use crate::support::git::repo; +use crate::support::paths; +use crate::support::registry::{api_path, registry_path, registry_url}; +use url::Url; + +fn api() -> Url { + Url::from_file_path(&*api_path()).ok().unwrap() +} + +fn write_crates(dest: &Path) { + let content = r#"{ + "crates": [{ + "created_at": "2014-11-16T20:17:35Z", + "description": "Design by contract style assertions for Rust", + "documentation": null, + "downloads": 2, + "homepage": null, + "id": "hoare", + "keywords": [], + "license": null, + "links": { + "owners": "/api/v1/crates/hoare/owners", + "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", + "version_downloads": "/api/v1/crates/hoare/downloads", + "versions": "/api/v1/crates/hoare/versions" + }, + "max_version": "0.1.1", + "name": "hoare", + "repository": "https://github.com/nick29581/libhoare", + "updated_at": "2014-11-20T21:49:21Z", + "versions": null + }], + "meta": { + "total": 1 
+ } + }"#; + + // Older versions of curl don't peel off query parameters when looking for + // filenames, so just make both files. + // + // On windows, though, `?` is an invalid character, but we always build curl + // from source there anyway! + File::create(&dest) + .unwrap() + .write_all(content.as_bytes()) + .unwrap(); + if !cfg!(windows) { + File::create(&dest.with_file_name("crates?q=postgres&per_page=10")) + .unwrap() + .write_all(content.as_bytes()) + .unwrap(); + } +} + +fn setup() { + let cargo_home = paths::root().join(".cargo"); + fs::create_dir_all(cargo_home).unwrap(); + fs::create_dir_all(&api_path().join("api/v1")).unwrap(); + + // Init a new registry + let _ = repo(®istry_path()) + .file( + "config.json", + &format!(r#"{{"dl":"{0}","api":"{0}"}}"#, api()), + ) + .build(); + + let base = api_path().join("api/v1/crates"); + write_crates(&base); +} + +fn set_cargo_config() { + let config = paths::root().join(".cargo/config"); + + File::create(&config) + .unwrap() + .write_all( + format!( + r#" +[source.crates-io] +registry = 'https://wut' +replace-with = 'dummy-registry' + +[source.dummy-registry] +registry = '{reg}' +"#, + reg = registry_url(), + ) + .as_bytes(), + ) + .unwrap(); +} + +#[cargo_test] +fn not_update() { + setup(); + set_cargo_config(); + + use cargo::core::{Shell, Source, SourceId}; + use cargo::sources::RegistrySource; + use cargo::util::Config; + + let sid = SourceId::for_registry(®istry_url()).unwrap(); + let cfg = Config::new(Shell::new(), paths::root(), paths::home().join(".cargo")); + let lock = cfg.acquire_package_cache_lock().unwrap(); + let mut regsrc = RegistrySource::remote(sid, &HashSet::new(), &cfg); + regsrc.update().unwrap(); + drop(lock); + + cargo_process("search postgres") + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") + .with_stderr("") // without "Updating ... index" + .run(); +} + +#[cargo_test] +fn replace_default() { + setup(); + set_cargo_config(); + + cargo_process("search postgres") + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") + .with_stderr_contains("[..]Updating [..] index") + .run(); +} + +#[cargo_test] +fn simple() { + setup(); + + cargo_process("search postgres --index") + .arg(registry_url().to_string()) + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") + .run(); +} + +// TODO: Deprecated +// remove once it has been decided '--host' can be safely removed +#[cargo_test] +fn simple_with_host() { + setup(); + + cargo_process("search postgres --host") + .arg(registry_url().to_string()) + .with_stderr( + "\ +[WARNING] The flag '--host' is no longer valid. + +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] `[CWD]/registry` index +", + ) + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") + .run(); +} + +// TODO: Deprecated +// remove once it has been decided '--host' can be safely removed +#[cargo_test] +fn simple_with_index_and_host() { + setup(); + + cargo_process("search postgres --index") + .arg(registry_url().to_string()) + .arg("--host") + .arg(registry_url().to_string()) + .with_stderr( + "\ +[WARNING] The flag '--host' is no longer valid. 
+ +Previous versions of Cargo accepted this flag, but it is being +deprecated. The flag is being renamed to 'index', as the flag +wants the location of the index. Please use '--index' instead. + +This will soon become a hard error, so it's either recommended +to update to a fixed version or contact the upstream maintainer +about this warning. +[UPDATING] `[CWD]/registry` index +", + ) + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") + .run(); +} + +#[cargo_test] +fn multiple_query_params() { + setup(); + + cargo_process("search postgres sql --index") + .arg(registry_url().to_string()) + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") + .run(); +} + +#[cargo_test] +fn help() { + cargo_process("search -h").run(); + cargo_process("help search").run(); + // Ensure that help output goes to stdout, not stderr. + cargo_process("search --help").with_stderr("").run(); + cargo_process("search --help") + .with_stdout_contains("[..] --frozen [..]") + .run(); +} diff --git a/tests/testsuite/shell_quoting.rs b/tests/testsuite/shell_quoting.rs new file mode 100644 index 00000000000..410f95b365c --- /dev/null +++ b/tests/testsuite/shell_quoting.rs @@ -0,0 +1,37 @@ +//! this file tests that when the commands being run are shown +//! in the output, their arguments are quoted properly +//! so that the command can be run in a terminal + +use crate::support::project; + +#[cargo_test] +fn features_are_quoted() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = ["mikeyhew@example.com"] + + [features] + some_feature = [] + default = ["some_feature"] + "#, + ) + .file("src/main.rs", "fn main() {error}") + .build(); + + p.cargo("check -v") + .env("MSYSTEM", "1") + .with_status(101) + .with_stderr_contains( + r#"[RUNNING] `rustc [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]`"# + ).with_stderr_contains( + r#" +Caused by: + process didn't exit successfully: [..] 
--cfg 'feature="default"' --cfg 'feature="some_feature"' [..]"# + ) + .run(); +} diff --git a/tests/testsuite/small_fd_limits.rs b/tests/testsuite/small_fd_limits.rs new file mode 100644 index 00000000000..27558a8657f --- /dev/null +++ b/tests/testsuite/small_fd_limits.rs @@ -0,0 +1,117 @@ +use std::env; +use std::ffi::OsStr; +use std::path::PathBuf; +use std::process::Command; + +use crate::support::git; +use crate::support::paths; +use crate::support::project; +use crate::support::registry::Package; +use git2; + +use url::Url; + +fn find_index() -> PathBuf { + let dir = paths::home().join(".cargo/registry/index"); + dir.read_dir().unwrap().next().unwrap().unwrap().path() +} + +fn run_test(path_env: Option<&OsStr>) { + const N: usize = 50; + + let foo = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/lib.rs", "") + .build(); + Package::new("bar", "0.1.0").publish(); + + foo.cargo("build").run(); + + let index = find_index(); + let path = paths::home().join("tmp"); + let url = Url::from_file_path(&path).unwrap().to_string(); + let repo = git2::Repository::init(&path).unwrap(); + let index = git2::Repository::open(&index).unwrap(); + let mut cfg = repo.config().unwrap(); + cfg.set_str("user.email", "foo@bar.com").unwrap(); + cfg.set_str("user.name", "Foo Bar").unwrap(); + let mut cfg = index.config().unwrap(); + cfg.set_str("user.email", "foo@bar.com").unwrap(); + cfg.set_str("user.name", "Foo Bar").unwrap(); + + for _ in 0..N { + git::commit(&repo); + index + .remote_anonymous(&url) + .unwrap() + .fetch(&["refs/heads/master:refs/remotes/foo/master"], None, None) + .unwrap(); + } + drop((repo, index)); + Package::new("bar", "0.1.1").publish(); + + let before = find_index() + .join(".git/objects/pack") + .read_dir() + .unwrap() + .count(); + assert!(before > N); + + let mut cmd = foo.cargo("update"); + cmd.env("__CARGO_PACKFILE_LIMIT", "10"); + if let Some(path) = path_env { + cmd.env("PATH", path); + } + cmd.env("CARGO_LOG", "trace"); + cmd.run(); + let after = find_index() + .join(".git/objects/pack") + .read_dir() + .unwrap() + .count(); + assert!( + after < before, + "packfiles before: {}\n\ + packfiles after: {}", + before, + after + ); +} + +#[cargo_test] +fn use_git_gc() { + if Command::new("git").arg("--version").output().is_err() { + return; + } + run_test(None); +} + +#[cargo_test] +// it looks like this test passes on some windows machines but not others, +// notably not on AppVeyor's machines. Sounds like another but for another day. +#[cfg_attr(windows, ignore)] +fn avoid_using_git() { + let path = env::var_os("PATH").unwrap_or_default(); + let mut paths = env::split_paths(&path).collect::>(); + let idx = paths + .iter() + .position(|p| p.join("git").exists() || p.join("git.exe").exists()); + match idx { + Some(i) => { + paths.remove(i); + } + None => return, + } + run_test(Some(&env::join_paths(&paths).unwrap())); +} diff --git a/tests/testsuite/support/cross_compile.rs b/tests/testsuite/support/cross_compile.rs new file mode 100644 index 00000000000..ac072822dba --- /dev/null +++ b/tests/testsuite/support/cross_compile.rs @@ -0,0 +1,138 @@ +use std::env; +use std::process::Command; +use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Once; + +use crate::support::{basic_bin_manifest, main_file, project}; + +pub fn disabled() -> bool { + // First, disable if `./configure` requested so. 
+ match env::var("CFG_DISABLE_CROSS_TESTS") { + Ok(ref s) if *s == "1" => return true, + _ => {} + } + + // Right now, the Windows bots cannot cross compile due to the Mingw setup, + // so we disable ourselves on all but macOS/Linux setups where the rustc + // install script ensures we have both architectures. + if !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) { + return true; + } + + // It's not particularly common to have a cross-compilation setup, so + // try to detect that before we fail a bunch of tests through no fault + // of the user. + static CAN_RUN_CROSS_TESTS: AtomicBool = AtomicBool::new(false); + static CHECK: Once = Once::new(); + + let cross_target = alternate(); + + CHECK.call_once(|| { + let p = project() + .at("cross_test") + .file("Cargo.toml", &basic_bin_manifest("cross_test")) + .file("src/cross_test.rs", &main_file(r#""testing!""#, &[])) + .build(); + + let result = p + .cargo("build --target") + .arg(&cross_target) + .exec_with_output(); + + if result.is_ok() { + CAN_RUN_CROSS_TESTS.store(true, Ordering::SeqCst); + } + }); + + if CAN_RUN_CROSS_TESTS.load(Ordering::SeqCst) { + // We were able to compile a simple project, so the user has the + // necessary `std::` bits installed. Therefore, tests should not + // be disabled. + return false; + } + + // We can't compile a simple cross project. We want to warn the user + // by failing a single test and having the remainder of the cross tests + // pass. We don't use `std::sync::Once` here because panicking inside its + // `call_once` method would poison the `Once` instance, which is not what + // we want. + static HAVE_WARNED: AtomicBool = AtomicBool::new(false); + + if HAVE_WARNED.swap(true, Ordering::SeqCst) { + // We are some other test and somebody else is handling the warning. + // Just disable the current test. + return true; + } + + // We are responsible for warning the user, which we do by panicking. + let rustup_available = Command::new("rustup").output().is_ok(); + + let linux_help = if cfg!(target_os = "linux") { + " + +You may need to install runtime libraries for your Linux distribution as well." + .to_string() + } else { + "".to_string() + }; + + let rustup_help = if rustup_available { + format!( + " + +Alternatively, you can install the necessary libraries for cross-compilation with + + rustup target add {}{}", + cross_target, linux_help + ) + } else { + "".to_string() + }; + + panic!( + "Cannot cross compile to {}. + +This failure can be safely ignored. 
If you would prefer to not see this
+failure, you can set the environment variable CFG_DISABLE_CROSS_TESTS to \"1\".{}
+",
+        cross_target, rustup_help
+    );
+}
+
+pub fn alternate() -> String {
+    let platform = match env::consts::OS {
+        "linux" => "unknown-linux-gnu",
+        "macos" => "apple-darwin",
+        "windows" => "pc-windows-msvc",
+        _ => unreachable!(),
+    };
+    let arch = match env::consts::ARCH {
+        "x86" => "x86_64",
+        "x86_64" => "i686",
+        _ => unreachable!(),
+    };
+    format!("{}-{}", arch, platform)
+}
+
+pub fn alternate_arch() -> &'static str {
+    match env::consts::ARCH {
+        "x86" => "x86_64",
+        "x86_64" => "x86",
+        _ => unreachable!(),
+    }
+}
+
+pub fn host() -> String {
+    let platform = match env::consts::OS {
+        "linux" => "unknown-linux-gnu",
+        "macos" => "apple-darwin",
+        "windows" => "pc-windows-msvc",
+        _ => unreachable!(),
+    };
+    let arch = match env::consts::ARCH {
+        "x86" => "i686",
+        "x86_64" => "x86_64",
+        _ => unreachable!(),
+    };
+    format!("{}-{}", arch, platform)
+}
diff --git a/tests/testsuite/support/git.rs b/tests/testsuite/support/git.rs
new file mode 100644
index 00000000000..2853417a060
--- /dev/null
+++ b/tests/testsuite/support/git.rs
@@ -0,0 +1,224 @@
+/*
+# Git Testing Support
+
+## Creating a git dependency
+`git::new()` is an easy way to create a new git repository containing a
+project that you can then use as a dependency. It will automatically add all
+the files you specify in the project and commit them to the repository.
+Example:
+
+```
+let git_project = git::new("dep1", |project| {
+    project
+        .file("Cargo.toml", &basic_manifest("dep1", "1.0.0"))
+        .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#)
+}).unwrap();
+
+// Use the `url()` method to get the file url to the new repository.
+let p = project()
+    .file("Cargo.toml", &format!(r#"
+        [package]
+        name = "a"
+        version = "1.0.0"
+
+        [dependencies]
+        dep1 = {{ git = '{}' }}
+    "#, git_project.url()))
+    .file("src/lib.rs", "extern crate dep1;")
+    .build();
+```
+
+## Manually creating repositories
+`git::repo()` can be used to create a `RepoBuilder` which provides a way of
+adding files to a blank repository and committing them.
+
+If you want to then manipulate the repository (such as adding new files or
+tags), you can use `git2::Repository::open()` to open the repository and then
+use some of the helper functions in this file to interact with the repository.
+
+*/
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use cargo::util::ProcessError;
+use git2;
+use url::Url;
+
+use crate::support::{path2url, project, Project, ProjectBuilder};
+
+#[must_use]
+pub struct RepoBuilder {
+    repo: git2::Repository,
+    files: Vec<PathBuf>,
+}
+
+pub struct Repository(git2::Repository);
+
+/// Create a `RepoBuilder` to build a new git repository.
+///
+/// Call `build()` to finalize and create the repository.
+pub fn repo(p: &Path) -> RepoBuilder {
+    RepoBuilder::init(p)
+}
+
+impl RepoBuilder {
+    pub fn init(p: &Path) -> RepoBuilder {
+        t!(fs::create_dir_all(p.parent().unwrap()));
+        let repo = t!(git2::Repository::init(p));
+        {
+            let mut config = t!(repo.config());
+            t!(config.set_str("user.name", "name"));
+            t!(config.set_str("user.email", "email"));
+        }
+        RepoBuilder {
+            repo,
+            files: Vec::new(),
+        }
+    }
+
+    /// Add a file to the repository.
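+    /// The file is written to the working tree and staged; it becomes part
+    /// of the initial commit created by `build()`.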
+    pub fn file(self, path: &str, contents: &str) -> RepoBuilder {
+        let mut me = self.nocommit_file(path, contents);
+        me.files.push(PathBuf::from(path));
+        me
+    }
+
+    /// Add a file that will be left in the working directory, but not added
+    /// to the repository.
+    pub fn nocommit_file(self, path: &str, contents: &str) -> RepoBuilder {
+        let dst = self.repo.workdir().unwrap().join(path);
+        t!(fs::create_dir_all(dst.parent().unwrap()));
+        t!(t!(File::create(&dst)).write_all(contents.as_bytes()));
+        self
+    }
+
+    /// Create the repository and commit the new files.
+    pub fn build(self) -> Repository {
+        {
+            let mut index = t!(self.repo.index());
+            for file in self.files.iter() {
+                t!(index.add_path(file));
+            }
+            t!(index.write());
+            let id = t!(index.write_tree());
+            let tree = t!(self.repo.find_tree(id));
+            let sig = t!(self.repo.signature());
+            t!(self
+                .repo
+                .commit(Some("HEAD"), &sig, &sig, "Initial commit", &tree, &[]));
+        }
+        let RepoBuilder { repo, .. } = self;
+        Repository(repo)
+    }
+}
+
+impl Repository {
+    pub fn root(&self) -> &Path {
+        self.0.workdir().unwrap()
+    }
+
+    pub fn url(&self) -> Url {
+        path2url(self.0.workdir().unwrap().to_path_buf())
+    }
+
+    pub fn revparse_head(&self) -> String {
+        self.0
+            .revparse_single("HEAD")
+            .expect("revparse HEAD")
+            .id()
+            .to_string()
+    }
+}
+
+/// Create a new git repository with a project.
+pub fn new<F>(name: &str, callback: F) -> Result<Project, ProcessError>
+where
+    F: FnOnce(ProjectBuilder) -> ProjectBuilder,
+{
+    let mut git_project = project().at(name);
+    git_project = callback(git_project);
+    let git_project = git_project.build();
+
+    let repo = t!(git2::Repository::init(&git_project.root()));
+    let mut cfg = t!(repo.config());
+    t!(cfg.set_str("user.email", "foo@bar.com"));
+    t!(cfg.set_str("user.name", "Foo Bar"));
+    drop(cfg);
+    add(&repo);
+    commit(&repo);
+    Ok(git_project)
+}
+
+/// Add all files in the working directory to the git index.
+pub fn add(repo: &git2::Repository) {
+    // FIXME(libgit2/libgit2#2514): apparently, `add_all` will add all submodules
+    // as well, and then fail because they're directories. As a stop-gap, we just
+    // ignore all submodules.
+    let mut s = t!(repo.submodules());
+    for submodule in s.iter_mut() {
+        t!(submodule.add_to_index(false));
+    }
+    let mut index = t!(repo.index());
+    t!(index.add_all(
+        ["*"].iter(),
+        git2::IndexAddOption::DEFAULT,
+        Some(
+            &mut (|a, _b| if s.iter().any(|s| a.starts_with(s.path())) {
+                1
+            } else {
+                0
+            })
+        )
+    ));
+    t!(index.write());
+}
+
+/// Add a git submodule to the repository.
+pub fn add_submodule<'a>(
+    repo: &'a git2::Repository,
+    url: &str,
+    path: &Path,
+) -> git2::Submodule<'a> {
+    let path = path.to_str().unwrap().replace(r"\", "/");
+    let mut s = t!(repo.submodule(url, Path::new(&path), false));
+    let subrepo = t!(s.open());
+    t!(subrepo.remote_add_fetch("origin", "refs/heads/*:refs/heads/*"));
+    let mut origin = t!(subrepo.find_remote("origin"));
+    t!(origin.fetch(&[], None, None));
+    t!(subrepo.checkout_head(None));
+    t!(s.add_finalize());
+    s
+}
+
+/// Commit changes to the git repository.
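+///
+/// This commits whatever is currently staged in the index; call `add()`
+/// first to stage the working tree. A minimal sketch of typical use:
+///
+/// ```
+/// git::add(&repo);
+/// git::commit(&repo);
+/// ```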
+pub fn commit(repo: &git2::Repository) -> git2::Oid {
+    let tree_id = t!(t!(repo.index()).write_tree());
+    let sig = t!(repo.signature());
+    let mut parents = Vec::new();
+    if let Some(parent) = repo.head().ok().map(|h| h.target().unwrap()) {
+        parents.push(t!(repo.find_commit(parent)))
+    }
+    let parents = parents.iter().collect::<Vec<_>>();
+    t!(repo.commit(
+        Some("HEAD"),
+        &sig,
+        &sig,
+        "test",
+        &t!(repo.find_tree(tree_id)),
+        &parents
+    ))
+}
+
+/// Create a new tag in the git repository.
+pub fn tag(repo: &git2::Repository, name: &str) {
+    let head = repo.head().unwrap().target().unwrap();
+    t!(repo.tag(
+        name,
+        &t!(repo.find_object(head, None)),
+        &t!(repo.signature()),
+        "make a new tag",
+        false
+    ));
+}
diff --git a/tests/testsuite/support/install.rs b/tests/testsuite/support/install.rs
new file mode 100644
index 00000000000..8e5bbd073bc
--- /dev/null
+++ b/tests/testsuite/support/install.rs
@@ -0,0 +1,28 @@
+use std::env::consts::EXE_SUFFIX;
+use std::path::{Path, PathBuf};
+
+use crate::support::paths;
+
+/// Used by `cargo install` tests to assert an executable binary
+/// has been installed. Example usage:
+///
+///     assert_has_installed_exe(cargo_home(), "foo");
+pub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+    assert!(check_has_installed_exe(path, name));
+}
+
+pub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {
+    assert!(!check_has_installed_exe(path, name));
+}
+
+fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {
+    path.as_ref().join("bin").join(exe(name)).is_file()
+}
+
+pub fn cargo_home() -> PathBuf {
+    paths::home().join(".cargo")
+}
+
+pub fn exe(name: &str) -> String {
+    format!("{}{}", name, EXE_SUFFIX)
+}
diff --git a/tests/testsuite/support/mod.rs b/tests/testsuite/support/mod.rs
new file mode 100644
index 00000000000..01abfdba550
--- /dev/null
+++ b/tests/testsuite/support/mod.rs
@@ -0,0 +1,1803 @@
+/*
+# Introduction to `support`.
+
+Cargo has a wide variety of integration tests that execute the `cargo` binary
+and verify its behavior. The `support` module contains many helpers to make
+this process easy.
+
+The general form of a test involves creating a "project", running cargo, and
+checking the result. Projects are created with the `ProjectBuilder` where you
+specify some files to create. The general form looks like this:
+
+```
+let p = project()
+    .file("src/main.rs", r#"fn main() { println!("hi!"); }"#)
+    .build();
+```
+
+If you do not specify a `Cargo.toml` manifest using `file()`, one is
+automatically created with a project name of `foo` using `basic_manifest()`.
+
+To run cargo, call the `cargo` method and make assertions on the execution:
+
+```
+p.cargo("run --bin foo")
+    .with_stderr(
+        "\
+[COMPILING] foo [..]
+[FINISHED] [..]
+[RUNNING] `target/debug/foo`
+",
+    )
+    .with_stdout("hi!")
+    .run();
+```
+
+The project creates a mini sandbox under the "cargo integration test"
+directory with each test getting a separate directory such as
+`/path/to/cargo/target/cit/t123/`. Each project appears as a separate
+directory. There is also an empty `home` directory created that will be used
+as a home directory instead of your normal home directory.
+
+See `support::lines_match` for an explanation of the string pattern matching.
+
+Browse the `pub` functions in the `support` module for a variety of other
+helpful utilities.
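+
+For example, a failing command can be asserted with an exit status and a
+stderr fragment (patterns like `[..]` are supported; see `lines_match`):
+
+```
+p.cargo("build")
+    .with_status(101)
+    .with_stderr_contains("[..]error[..]")
+    .run();
+```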
+ +## Testing Nightly Features + +If you are testing a Cargo feature that only works on "nightly" cargo, then +you need to call `masquerade_as_nightly_cargo` on the process builder like +this: + +``` +p.cargo("build").masquerade_as_nightly_cargo() +``` + +If you are testing a feature that only works on *nightly rustc* (such as +benchmarks), then you should exit the test if it is not running with nightly +rust, like this: + +``` +if !is_nightly() { + // Add a comment here explaining why this is necessary. + return; +} +``` + +## Platform-specific Notes + +When checking output, use `/` for paths even on Windows: the actual output +of `\` on Windows will be replaced with `/`. + +Be careful when executing binaries on Windows. You should not rename, delete, +or overwrite a binary immediately after running it. Under some conditions +Windows will fail with errors like "directory not empty" or "failed to remove" +or "access is denied". + +## Specifying Dependencies + +You should not write any tests that use the network such as contacting +crates.io. Typically, simple path dependencies are the easiest way to add a +dependency. Example: + +``` +let p = project() + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "1.0.0" + + [dependencies] + bar = {path = "bar"} + "#) + .file("src/lib.rs", "extern crate bar;") + .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) + .file("bar/src/lib.rs", "") + .build(); +``` + +If you need to test with registry dependencies, see +`support::registry::Package` for creating packages you can depend on. + +If you need to test git dependencies, see `support::git` to create a git +dependency. + +*/ + +use std::env; +use std::ffi::OsStr; +use std::fmt; +use std::fs; +use std::io::prelude::*; +use std::os; +use std::path::{Path, PathBuf}; +use std::process::{Command, Output}; +use std::str; +use std::time::{self, Duration}; +use std::usize; + +use cargo; +use cargo::util::{is_ci, CargoResult, ProcessBuilder, ProcessError, Rustc}; +use filetime; +use serde_json::{self, Value}; +use url::Url; + +use self::paths::CargoPathExt; + +macro_rules! 
t {
+    ($e:expr) => {
+        match $e {
+            Ok(e) => e,
+            Err(e) => panic!("{} failed with {}", stringify!($e), e),
+        }
+    };
+}
+
+pub mod cross_compile;
+pub mod git;
+pub mod paths;
+pub mod publish;
+pub mod registry;
+
+/*
+ *
+ * ===== Builders =====
+ *
+ */
+
+#[derive(PartialEq, Clone)]
+struct FileBuilder {
+    path: PathBuf,
+    body: String,
+}
+
+impl FileBuilder {
+    pub fn new(path: PathBuf, body: &str) -> FileBuilder {
+        FileBuilder {
+            path,
+            body: body.to_string(),
+        }
+    }
+
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+
+        let mut file = fs::File::create(&self.path)
+            .unwrap_or_else(|e| panic!("could not create file {}: {}", self.path.display(), e));
+
+        t!(file.write_all(self.body.as_bytes()));
+    }
+
+    fn dirname(&self) -> &Path {
+        self.path.parent().unwrap()
+    }
+}
+
+#[derive(PartialEq, Clone)]
+struct SymlinkBuilder {
+    dst: PathBuf,
+    src: PathBuf,
+}
+
+impl SymlinkBuilder {
+    pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {
+        SymlinkBuilder { dst, src }
+    }
+
+    #[cfg(unix)]
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+        t!(os::unix::fs::symlink(&self.dst, &self.src));
+    }
+
+    #[cfg(windows)]
+    fn mk(&self) {
+        self.dirname().mkdir_p();
+        t!(os::windows::fs::symlink_file(&self.dst, &self.src));
+    }
+
+    fn dirname(&self) -> &Path {
+        self.src.parent().unwrap()
+    }
+}
+
+pub struct Project {
+    root: PathBuf,
+}
+
+#[must_use]
+pub struct ProjectBuilder {
+    root: Project,
+    files: Vec<FileBuilder>,
+    symlinks: Vec<SymlinkBuilder>,
+    no_manifest: bool,
+}
+
+impl ProjectBuilder {
+    /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo`
+    pub fn root(&self) -> PathBuf {
+        self.root.root()
+    }
+
+    /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug`
+    pub fn target_debug_dir(&self) -> PathBuf {
+        self.root.target_debug_dir()
+    }
+
+    pub fn new(root: PathBuf) -> ProjectBuilder {
+        ProjectBuilder {
+            root: Project { root },
+            files: vec![],
+            symlinks: vec![],
+            no_manifest: false,
+        }
+    }
+
+    pub fn at<P: AsRef<Path>>(mut self, path: P) -> Self {
+        self.root = Project {
+            root: paths::root().join(path),
+        };
+        self
+    }
+
+    /// Adds a file to the project.
+    pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> Self {
+        self._file(path.as_ref(), body);
+        self
+    }
+
+    fn _file(&mut self, path: &Path, body: &str) {
+        self.files
+            .push(FileBuilder::new(self.root.root().join(path), body));
+    }
+
+    /// Adds a symlink to the project.
+    pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> Self {
+        self.symlinks.push(SymlinkBuilder::new(
+            self.root.root().join(dst),
+            self.root.root().join(src),
+        ));
+        self
+    }
+
+    pub fn no_manifest(mut self) -> Self {
+        self.no_manifest = true;
+        self
+    }
+
+    /// Creates the project.
+    pub fn build(mut self) -> Project {
+        // First, clean the directory if it already exists
+        self.rm_root();
+
+        // Create the empty directory
+        self.root.root().mkdir_p();
+
+        let manifest_path = self.root.root().join("Cargo.toml");
+        if !self.no_manifest && self.files.iter().all(|fb| fb.path != manifest_path) {
+            self._file(Path::new("Cargo.toml"), &basic_manifest("foo", "0.0.1"))
+        }
+
+        let past = time::SystemTime::now() - Duration::new(1, 0);
+        let ftime = filetime::FileTime::from_system_time(past);
+
+        for file in self.files.iter() {
+            file.mk();
+            if is_coarse_mtime() {
+                // Place the entire project 1 second in the past to ensure
+                // that if cargo is called multiple times, the 2nd call will
+                // see targets as "fresh". Without this, if cargo finishes in
+                // under 1 second, the second call will see the mtime of
+                // source == mtime of output and consider it dirty.
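+                // (set_file_times backdates both the access time and the
+                // modification time to one second ago.)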
+ filetime::set_file_times(&file.path, ftime, ftime).unwrap(); + } + } + + for symlink in self.symlinks.iter() { + symlink.mk(); + } + + let ProjectBuilder { root, .. } = self; + root + } + + fn rm_root(&self) { + self.root.root().rm_rf() + } +} + +impl Project { + /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo` + pub fn root(&self) -> PathBuf { + self.root.clone() + } + + /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target` + pub fn build_dir(&self) -> PathBuf { + self.root().join("target") + } + + /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug` + pub fn target_debug_dir(&self) -> PathBuf { + self.build_dir().join("debug") + } + + /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo` + pub fn url(&self) -> Url { + path2url(self.root()) + } + + /// Path to an example built as a library. + /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" + /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib` + pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf { + self.target_debug_dir() + .join("examples") + .join(paths::get_lib_filename(name, kind)) + } + + /// Path to a debug binary. + /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo` + pub fn bin(&self, b: &str) -> PathBuf { + self.build_dir() + .join("debug") + .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) + } + + /// Path to a release binary. + /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo` + pub fn release_bin(&self, b: &str) -> PathBuf { + self.build_dir() + .join("release") + .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) + } + + /// Path to a debug binary for a specific target triple. + /// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo` + pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { + self.build_dir().join(target).join("debug").join(&format!( + "{}{}", + b, + env::consts::EXE_SUFFIX + )) + } + + /// Returns an iterator of paths matching the glob pattern, which is + /// relative to the project root. + pub fn glob>(&self, pattern: P) -> glob::Paths { + let pattern = self.root().join(pattern); + glob::glob(pattern.to_str().expect("failed to convert pattern to str")) + .expect("failed to glob") + } + + /// Changes the contents of an existing file. + pub fn change_file(&self, path: &str, body: &str) { + FileBuilder::new(self.root().join(path), body).mk() + } + + /// Creates a `ProcessBuilder` to run a program in the project + /// and wrap it in an Execs to assert on the execution. + /// Example: + /// p.process(&p.bin("foo")) + /// .with_stdout("bar\n") + /// .run(); + pub fn process>(&self, program: T) -> Execs { + let mut p = crate::support::process(program); + p.cwd(self.root()); + execs().with_process_builder(p) + } + + /// Creates a `ProcessBuilder` to run cargo. + /// Arguments can be separated by spaces. + /// Example: + /// p.cargo("build --bin foo").run(); + pub fn cargo(&self, cmd: &str) -> Execs { + let mut execs = self.process(&cargo_exe()); + if let Some(ref mut p) = execs.process_builder { + split_and_add_args(p, cmd); + } + execs + } + + /// Safely run a process after `cargo build`. + /// + /// Windows has a problem where a process cannot be reliably + /// be replaced, removed, or renamed immediately after executing it. + /// The action may fail (with errors like Access is denied), or + /// it may succeed, but future attempts to use the same filename + /// will fail with "Already Exists". 
+    ///
+    /// If you have a test that needs to do `cargo run` multiple
+    /// times, you should instead use `cargo build` and use this
+    /// method to run the executable. Each time you call this,
+    /// use a new name for `dst`.
+    /// See rust-lang/cargo#5481.
+    pub fn rename_run(&self, src: &str, dst: &str) -> Execs {
+        let src = self.bin(src);
+        let dst = self.bin(dst);
+        fs::rename(&src, &dst)
+            .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e));
+        self.process(dst)
+    }
+
+    /// Returns the contents of `Cargo.lock`.
+    pub fn read_lockfile(&self) -> String {
+        self.read_file("Cargo.lock")
+    }
+
+    /// Returns the contents of a path in the project root
+    pub fn read_file(&self, path: &str) -> String {
+        let mut buffer = String::new();
+        fs::File::open(self.root().join(path))
+            .unwrap()
+            .read_to_string(&mut buffer)
+            .unwrap();
+        buffer
+    }
+
+    /// Modifies `Cargo.toml` to remove all commented lines.
+    pub fn uncomment_root_manifest(&self) {
+        let mut contents = String::new();
+        fs::File::open(self.root().join("Cargo.toml"))
+            .unwrap()
+            .read_to_string(&mut contents)
+            .unwrap();
+        fs::File::create(self.root().join("Cargo.toml"))
+            .unwrap()
+            .write_all(contents.replace("#", "").as_bytes())
+            .unwrap();
+    }
+
+    pub fn symlink(&self, src: impl AsRef<Path>, dst: impl AsRef<Path>) {
+        let src = self.root().join(src.as_ref());
+        let dst = self.root().join(dst.as_ref());
+        #[cfg(unix)]
+        {
+            if let Err(e) = os::unix::fs::symlink(&src, &dst) {
+                panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
+            }
+        }
+        #[cfg(windows)]
+        {
+            if src.is_dir() {
+                if let Err(e) = os::windows::fs::symlink_dir(&src, &dst) {
+                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
+                }
+            } else {
+                if let Err(e) = os::windows::fs::symlink_file(&src, &dst) {
+                    panic!("failed to symlink {:?} to {:?}: {:?}", src, dst, e);
+                }
+            }
+        }
+    }
+}
+
+// Generates a project layout
+pub fn project() -> ProjectBuilder {
+    ProjectBuilder::new(paths::root().join("foo"))
+}
+
+// Generates a project layout inside our fake home dir
+pub fn project_in_home(name: &str) -> ProjectBuilder {
+    ProjectBuilder::new(paths::home().join(name))
+}
+
+// === Helpers ===
+
+pub fn main_file(println: &str, deps: &[&str]) -> String {
+    let mut buf = String::new();
+
+    for dep in deps.iter() {
+        buf.push_str(&format!("extern crate {};\n", dep));
+    }
+
+    buf.push_str("fn main() { println!(");
+    buf.push_str(println);
+    buf.push_str("); }\n");
+
+    buf
+}
+
+trait ErrMsg<T> {
+    fn with_err_msg(self, val: String) -> Result<T, String>;
+}
+
+impl<T, E: fmt::Display> ErrMsg<T> for Result<T, E> {
+    fn with_err_msg(self, val: String) -> Result<T, String> {
+        match self {
+            Ok(val) => Ok(val),
+            Err(err) => Err(format!("{}; original={}", val, err)),
+        }
+    }
+}
+
+// Path to cargo executables
+pub fn cargo_dir() -> PathBuf {
+    env::var_os("CARGO_BIN_PATH")
+        .map(PathBuf::from)
+        .or_else(|| {
+            env::current_exe().ok().map(|mut path| {
+                path.pop();
+                if path.ends_with("deps") {
+                    path.pop();
+                }
+                path
+            })
+        })
+        .unwrap_or_else(|| panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test"))
+}
+
+pub fn cargo_exe() -> PathBuf {
+    cargo_dir().join(format!("cargo{}", env::consts::EXE_SUFFIX))
+}
+
+/*
+ *
+ * ===== Matchers =====
+ *
+ */
+
+pub type MatchResult = Result<(), String>;
+
+#[must_use]
+#[derive(Clone)]
+pub struct Execs {
+    ran: bool,
+    process_builder: Option<ProcessBuilder>,
+    expect_stdout: Option<String>,
+    expect_stdin: Option<String>,
+    expect_stderr: Option<String>,
+    expect_exit_code: Option<i32>,
+    expect_stdout_contains: Vec<String>,
+    expect_stderr_contains: Vec<String>,
+    expect_either_contains: Vec<String>,
+    expect_stdout_contains_n: Vec<(String, usize)>,
+    expect_stdout_not_contains: Vec<String>,
+    expect_stderr_not_contains: Vec<String>,
+    expect_stderr_unordered: Vec<String>,
+    expect_neither_contains: Vec<String>,
+    expect_stderr_with_without: Vec<(Vec<String>, Vec<String>)>,
+    expect_json: Option<Vec<String>>,
+    expect_json_contains_unordered: Vec<String>,
+    stream_output: bool,
+}
+
+impl Execs {
+    pub fn with_process_builder(mut self, p: ProcessBuilder) -> Execs {
+        self.process_builder = Some(p);
+        self
+    }
+
+    /// Verifies that stdout is equal to the given lines.
+    /// See `lines_match` for supported patterns.
+    pub fn with_stdout<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_stdout = Some(expected.to_string());
+        self
+    }
+
+    /// Verifies that stderr is equal to the given lines.
+    /// See `lines_match` for supported patterns.
+    pub fn with_stderr<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_stderr = Some(expected.to_string());
+        self
+    }
+
+    /// Verifies the exit code from the process.
+    ///
+    /// This is not necessary if the expected exit code is `0`.
+    pub fn with_status(&mut self, expected: i32) -> &mut Self {
+        self.expect_exit_code = Some(expected);
+        self
+    }
+
+    /// Removes exit code check for the process.
+    ///
+    /// By default, the expected exit code is `0`.
+    pub fn without_status(&mut self) -> &mut Self {
+        self.expect_exit_code = None;
+        self
+    }
+
+    /// Verifies that stdout contains the given contiguous lines somewhere in
+    /// its output.
+    /// See `lines_match` for supported patterns.
+    pub fn with_stdout_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_stdout_contains.push(expected.to_string());
+        self
+    }
+
+    /// Verifies that stderr contains the given contiguous lines somewhere in
+    /// its output.
+    /// See `lines_match` for supported patterns.
+    pub fn with_stderr_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_stderr_contains.push(expected.to_string());
+        self
+    }
+
+    /// Verifies that either stdout or stderr contains the given contiguous
+    /// lines somewhere in its output.
+    /// See `lines_match` for supported patterns.
+    pub fn with_either_contains<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_either_contains.push(expected.to_string());
+        self
+    }
+
+    /// Verifies that stdout contains the given contiguous lines somewhere in
+    /// its output, and should be repeated `number` times.
+    /// See `lines_match` for supported patterns.
+    pub fn with_stdout_contains_n<S: ToString>(&mut self, expected: S, number: usize) -> &mut Self {
+        self.expect_stdout_contains_n
+            .push((expected.to_string(), number));
+        self
+    }
+
+    /// Verifies that stdout does not contain the given contiguous lines.
+    /// See `lines_match` for supported patterns.
+    /// See note on `with_stderr_does_not_contain`.
+    pub fn with_stdout_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_stdout_not_contains.push(expected.to_string());
+        self
+    }
+
+    /// Verifies that stderr does not contain the given contiguous lines.
+    /// See `lines_match` for supported patterns.
+    ///
+    /// Care should be taken when using this method because there is a
+    /// limitless number of possible things that *won't* appear. A typo means
+    /// your test will pass without verifying the correct behavior. If
+    /// possible, write the test first so that it fails, and then implement
+    /// your fix/feature to make it pass.
+    pub fn with_stderr_does_not_contain<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_stderr_not_contains.push(expected.to_string());
+        self
+    }
+
+    /// Verifies that all of the stderr output is equal to the given lines,
+    /// ignoring the order of the lines.
+    /// See `lines_match` for supported patterns.
+    /// This is useful when checking the output of `cargo build -v` since
+    /// the order of the output is not always deterministic.
+    /// It is recommended to use `with_stderr_contains` instead, unless you
+    /// really want to check *every* line of output.
+    ///
+    /// Be careful when using patterns such as `[..]`, because you may end up
+    /// with multiple lines that might match, and this is not smart enough to
+    /// do anything like longest-match. For example, avoid something like:
+    ///
+    ///     [RUNNING] `rustc [..]
+    ///     [RUNNING] `rustc --crate-name foo [..]
+    ///
+    /// This will randomly fail if the other crate name is `bar`, and the
+    /// order changes.
+    pub fn with_stderr_unordered<S: ToString>(&mut self, expected: S) -> &mut Self {
+        self.expect_stderr_unordered.push(expected.to_string());
+        self
+    }
+
+    /// Verify that a particular line appears in stderr with and without the
+    /// given substrings. Exactly one line must match.
+    ///
+    /// The substrings are matched as `contains`. Example:
+    ///
+    /// ```no_run
+    /// execs.with_stderr_line_without(
+    ///     &[
+    ///         "[RUNNING] `rustc --crate-name build_script_build",
+    ///         "-C opt-level=3",
+    ///     ],
+    ///     &["-C debuginfo", "-C incremental"],
+    /// )
+    /// ```
+    ///
+    /// This will check that a build line includes `-C opt-level=3` but does
+    /// not contain `-C debuginfo` or `-C incremental`.
+    ///
+    /// Be careful writing the `without` fragments, see note in
+    /// `with_stderr_does_not_contain`.
+    pub fn with_stderr_line_without<S: ToString>(
+        &mut self,
+        with: &[S],
+        without: &[S],
+    ) -> &mut Self {
+        let with = with.iter().map(|s| s.to_string()).collect();
+        let without = without.iter().map(|s| s.to_string()).collect();
+        self.expect_stderr_with_without.push((with, without));
+        self
+    }
+
+    /// Verifies the JSON output matches the given JSON.
+    /// Typically used when testing cargo commands that emit JSON.
+    /// Each separate JSON object should be separated by a blank line.
+    /// Example:
+    ///
+    ///     assert_that(
+    ///         p.cargo("metadata"),
+    ///         execs().with_json(r#"
+    ///             {"example": "abc"}
+    ///
+    ///             {"example": "def"}
+    ///         "#)
+    ///     );
+    ///
+    /// Objects should match in the order given.
+    /// The order of arrays is ignored.
+    /// Strings support patterns described in `lines_match`.
+    /// Use `{...}` to match any object.
+    pub fn with_json(&mut self, expected: &str) -> &mut Self {
+        self.expect_json = Some(
+            expected
+                .split("\n\n")
+                .map(|line| line.to_string())
+                .collect(),
+        );
+        self
+    }
+
+    /// Verifies JSON output contains the given objects (in any order) somewhere
+    /// in its output.
+    ///
+    /// CAUTION: Be very careful when using this. Make sure every object is
+    /// unique (not a subset of one another). Also avoid using objects that
+    /// could possibly match multiple output lines unless you're very sure of
+    /// what you are doing.
+    ///
+    /// See `with_json` for more detail.
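+    ///
+    /// Example, asserting that two particular objects appear (in any order,
+    /// using the same illustrative shapes as the `with_json` example above):
+    ///
+    ///     execs().with_json_contains_unordered(r#"
+    ///         {"example": "abc"}
+    ///
+    ///         {"example": "def"}
+    ///     "#);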
+    pub fn with_json_contains_unordered(&mut self, expected: &str) -> &mut Self {
+        self.expect_json_contains_unordered
+            .extend(expected.split("\n\n").map(|line| line.to_string()));
+        self
+    }
+
+    /// Forward subordinate process stdout/stderr to the terminal.
+    /// Useful for printf debugging of the tests.
+    /// CAUTION: CI will fail if you leave this in your test!
+    #[allow(unused)]
+    pub fn stream(&mut self) -> &mut Self {
+        self.stream_output = true;
+        self
+    }
+
+    pub fn arg<T: AsRef<OsStr>>(&mut self, arg: T) -> &mut Self {
+        if let Some(ref mut p) = self.process_builder {
+            p.arg(arg);
+        }
+        self
+    }
+
+    pub fn cwd<T: AsRef<OsStr>>(&mut self, path: T) -> &mut Self {
+        if let Some(ref mut p) = self.process_builder {
+            if let Some(cwd) = p.get_cwd() {
+                let new_path = cwd.join(path.as_ref());
+                p.cwd(new_path);
+            } else {
+                p.cwd(path);
+            }
+        }
+        self
+    }
+
+    pub fn env<T: AsRef<OsStr>>(&mut self, key: &str, val: T) -> &mut Self {
+        if let Some(ref mut p) = self.process_builder {
+            p.env(key, val);
+        }
+        self
+    }
+
+    pub fn env_remove(&mut self, key: &str) -> &mut Self {
+        if let Some(ref mut p) = self.process_builder {
+            p.env_remove(key);
+        }
+        self
+    }
+
+    pub fn exec_with_output(&mut self) -> CargoResult<Output> {
+        self.ran = true;
+        // TODO avoid unwrap
+        let p = (&self.process_builder).clone().unwrap();
+        p.exec_with_output()
+    }
+
+    pub fn build_command(&mut self) -> Command {
+        self.ran = true;
+        // TODO avoid unwrap
+        let p = (&self.process_builder).clone().unwrap();
+        p.build_command()
+    }
+
+    pub fn masquerade_as_nightly_cargo(&mut self) -> &mut Self {
+        if let Some(ref mut p) = self.process_builder {
+            p.masquerade_as_nightly_cargo();
+        }
+        self
+    }
+
+    pub fn run(&mut self) {
+        self.ran = true;
+        let p = (&self.process_builder).clone().unwrap();
+        if let Err(e) = self.match_process(&p) {
+            panic!("\nExpected: {:?}\n but: {}", self, e)
+        }
+    }
+
+    pub fn run_output(&mut self, output: &Output) {
+        self.ran = true;
+        if let Err(e) = self.match_output(output) {
+            panic!("\nExpected: {:?}\n but: {}", self, e)
+        }
+    }
+
+    fn verify_checks_output(&self, output: &Output) {
+        if self.expect_exit_code.unwrap_or(0) != 0
+            && self.expect_stdout.is_none()
+            && self.expect_stdin.is_none()
+            && self.expect_stderr.is_none()
+            && self.expect_stdout_contains.is_empty()
+            && self.expect_stderr_contains.is_empty()
+            && self.expect_either_contains.is_empty()
+            && self.expect_stdout_contains_n.is_empty()
+            && self.expect_stdout_not_contains.is_empty()
+            && self.expect_stderr_not_contains.is_empty()
+            && self.expect_stderr_unordered.is_empty()
+            && self.expect_neither_contains.is_empty()
+            && self.expect_stderr_with_without.is_empty()
+            && self.expect_json.is_none()
+            && self.expect_json_contains_unordered.is_empty()
+        {
+            panic!(
+                "`with_status()` is used, but no output is checked.\n\
+                 The test must check the output to ensure the correct error is triggered.\n\
+                 --- stdout\n{}\n--- stderr\n{}",
+                String::from_utf8_lossy(&output.stdout),
+                String::from_utf8_lossy(&output.stderr),
+            );
+        }
+    }
+
+    fn match_process(&self, process: &ProcessBuilder) -> MatchResult {
+        println!("running {}", process);
+        let res = if self.stream_output {
+            if is_ci() {
+                panic!("`.stream()` is for local debugging")
+            }
+            process.exec_with_streaming(
+                &mut |out| {
+                    println!("{}", out);
+                    Ok(())
+                },
+                &mut |err| {
+                    eprintln!("{}", err);
+                    Ok(())
+                },
+                true,
+            )
+        } else {
+            process.exec_with_output()
+        };
+
+        match res {
+            Ok(out) => self.match_output(&out),
+            Err(e) => {
+                let err = e.downcast_ref::<ProcessError>();
+                if let Some(&ProcessError {
+                    output: Some(ref out),
+                    ..
+ }) = err + { + return self.match_output(out); + } + let mut s = format!("could not exec process {}: {}", process, e); + for cause in e.iter_causes() { + s.push_str(&format!("\ncaused by: {}", cause)); + } + Err(s) + } + } + } + + fn match_output(&self, actual: &Output) -> MatchResult { + self.verify_checks_output(actual); + self.match_status(actual) + .and(self.match_stdout(actual)) + .and(self.match_stderr(actual)) + } + + fn match_status(&self, actual: &Output) -> MatchResult { + match self.expect_exit_code { + None => Ok(()), + Some(code) if actual.status.code() == Some(code) => Ok(()), + Some(_) => Err(format!( + "exited with {}\n--- stdout\n{}\n--- stderr\n{}", + actual.status, + String::from_utf8_lossy(&actual.stdout), + String::from_utf8_lossy(&actual.stderr) + )), + } + } + + fn match_stdout(&self, actual: &Output) -> MatchResult { + self.match_std( + self.expect_stdout.as_ref(), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::Exact, + )?; + for expect in self.expect_stdout_contains.iter() { + self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::Partial, + )?; + } + for expect in self.expect_stderr_contains.iter() { + self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stdout, + MatchKind::Partial, + )?; + } + for &(ref expect, number) in self.expect_stdout_contains_n.iter() { + self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::PartialN(number), + )?; + } + for expect in self.expect_stdout_not_contains.iter() { + self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stderr, + MatchKind::NotPresent, + )?; + } + for expect in self.expect_stderr_not_contains.iter() { + self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stdout, + MatchKind::NotPresent, + )?; + } + for expect in self.expect_stderr_unordered.iter() { + self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stdout, + MatchKind::Unordered, + )?; + } + for expect in self.expect_neither_contains.iter() { + self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stdout, + MatchKind::NotPresent, + )?; + + self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stderr, + MatchKind::NotPresent, + )?; + } + + for expect in self.expect_either_contains.iter() { + let match_std = self.match_std( + Some(expect), + &actual.stdout, + "stdout", + &actual.stdout, + MatchKind::Partial, + ); + let match_err = self.match_std( + Some(expect), + &actual.stderr, + "stderr", + &actual.stderr, + MatchKind::Partial, + ); + + if let (Err(_), Err(_)) = (match_std, match_err) { + return Err(format!( + "expected to find:\n\ + {}\n\n\ + did not find in either output.", + expect + )); + } + } + + for (with, without) in self.expect_stderr_with_without.iter() { + self.match_with_without(&actual.stderr, with, without)?; + } + + if let Some(ref objects) = self.expect_json { + let stdout = str::from_utf8(&actual.stdout) + .map_err(|_| "stdout was not utf8 encoded".to_owned())?; + let lines = stdout + .lines() + .filter(|line| line.starts_with('{')) + .collect::>(); + if lines.len() != objects.len() { + return Err(format!( + "expected {} json lines, got {}, stdout:\n{}", + objects.len(), + lines.len(), + stdout + )); + } + for (obj, line) in objects.iter().zip(lines) { + self.match_json(obj, line)?; + } + } + + if !self.expect_json_contains_unordered.is_empty() { + let stdout = str::from_utf8(&actual.stdout) + .map_err(|_| "stdout was not utf8 
encoded".to_owned())?; + let mut lines = stdout + .lines() + .filter(|line| line.starts_with('{')) + .collect::>(); + for obj in &self.expect_json_contains_unordered { + match lines + .iter() + .position(|line| self.match_json(obj, line).is_ok()) + { + Some(index) => lines.remove(index), + None => { + return Err(format!( + "Did not find expected JSON:\n\ + {}\n\ + Remaining available output:\n\ + {}\n", + serde_json::to_string_pretty(obj).unwrap(), + lines.join("\n") + )); + } + }; + } + } + Ok(()) + } + + fn match_stderr(&self, actual: &Output) -> MatchResult { + self.match_std( + self.expect_stderr.as_ref(), + &actual.stderr, + "stderr", + &actual.stdout, + MatchKind::Exact, + ) + } + + fn normalize_actual(&self, description: &str, actual: &[u8]) -> Result { + let actual = match str::from_utf8(actual) { + Err(..) => return Err(format!("{} was not utf8 encoded", description)), + Ok(actual) => actual, + }; + Ok(self.normalize_matcher(actual)) + } + + fn normalize_matcher(&self, matcher: &str) -> String { + // Let's not deal with / vs \ (windows...) + let matcher = matcher.replace("\\\\", "/").replace("\\", "/"); + + // Weirdness for paths on Windows extends beyond `/` vs `\` apparently. + // Namely paths like `c:\` and `C:\` are equivalent and that can cause + // issues. The return value of `env::current_dir()` may return a + // lowercase drive name, but we round-trip a lot of values through `Url` + // which will auto-uppercase the drive name. To just ignore this + // distinction we try to canonicalize as much as possible, taking all + // forms of a path and canonicalizing them to one. + let replace_path = |s: &str, path: &Path, with: &str| { + let path_through_url = Url::from_file_path(path).unwrap().to_file_path().unwrap(); + let path1 = path.display().to_string().replace("\\", "/"); + let path2 = path_through_url.display().to_string().replace("\\", "/"); + s.replace(&path1, with) + .replace(&path2, with) + .replace(with, &path1) + }; + + // Do the template replacements on the expected string. + let matcher = match &self.process_builder { + None => matcher.to_string(), + Some(p) => match p.get_cwd() { + None => matcher.to_string(), + Some(cwd) => replace_path(&matcher, cwd, "[CWD]"), + }, + }; + + // Similar to cwd above, perform similar treatment to the root path + // which in theory all of our paths should otherwise get rooted at. + let root = paths::root(); + let matcher = replace_path(&matcher, &root, "[ROOT]"); + + // Let's not deal with \r\n vs \n on windows... 
+ let matcher = matcher.replace("\r", ""); + + // It's easier to read tabs in outputs if they don't show up as literal + // hidden characters + let matcher = matcher.replace("\t", ""); + + return matcher; + } + + fn match_std( + &self, + expected: Option<&String>, + actual: &[u8], + description: &str, + extra: &[u8], + kind: MatchKind, + ) -> MatchResult { + let out = match expected { + Some(out) => self.normalize_matcher(out), + None => return Ok(()), + }; + + let actual = self.normalize_actual(description, actual)?; + + match kind { + MatchKind::Exact => { + let a = actual.lines(); + let e = out.lines(); + + let diffs = self.diff_lines(a, e, false); + if diffs.is_empty() { + Ok(()) + } else { + Err(format!( + "differences:\n\ + {}\n\n\ + other output:\n\ + `{}`", + diffs.join("\n"), + String::from_utf8_lossy(extra) + )) + } + } + MatchKind::Partial => { + let mut a = actual.lines(); + let e = out.lines(); + + let mut diffs = self.diff_lines(a.clone(), e.clone(), true); + while a.next().is_some() { + let a = self.diff_lines(a.clone(), e.clone(), true); + if a.len() < diffs.len() { + diffs = a; + } + } + if diffs.is_empty() { + Ok(()) + } else { + Err(format!( + "expected to find:\n\ + {}\n\n\ + did not find in output:\n\ + {}", + out, actual + )) + } + } + MatchKind::PartialN(number) => { + let mut a = actual.lines(); + let e = out.lines(); + + let mut matches = 0; + + while let Some(..) = { + if self.diff_lines(a.clone(), e.clone(), true).is_empty() { + matches += 1; + } + a.next() + } {} + + if matches == number { + Ok(()) + } else { + Err(format!( + "expected to find {} occurrences:\n\ + {}\n\n\ + did not find in output:\n\ + {}", + number, out, actual + )) + } + } + MatchKind::NotPresent => { + let mut a = actual.lines(); + let e = out.lines(); + + let mut diffs = self.diff_lines(a.clone(), e.clone(), true); + while a.next().is_some() { + let a = self.diff_lines(a.clone(), e.clone(), true); + if a.len() < diffs.len() { + diffs = a; + } + } + if diffs.is_empty() { + Err(format!( + "expected not to find:\n\ + {}\n\n\ + but found in output:\n\ + {}", + out, actual + )) + } else { + Ok(()) + } + } + MatchKind::Unordered => { + let mut a = actual.lines().collect::>(); + let e = out.lines(); + + for e_line in e { + match a.iter().position(|a_line| lines_match(e_line, a_line)) { + Some(index) => a.remove(index), + None => { + return Err(format!( + "Did not find expected line:\n\ + {}\n\ + Remaining available output:\n\ + {}\n", + e_line, + a.join("\n") + )); + } + }; + } + if !a.is_empty() { + Err(format!( + "Output included extra lines:\n\ + {}\n", + a.join("\n") + )) + } else { + Ok(()) + } + } + } + } + + fn match_with_without( + &self, + actual: &[u8], + with: &[String], + without: &[String], + ) -> MatchResult { + let actual = self.normalize_actual("stderr", actual)?; + let contains = |s, line| { + let mut s = self.normalize_matcher(s); + s.insert_str(0, "[..]"); + s.push_str("[..]"); + lines_match(&s, line) + }; + let matches: Vec<&str> = actual + .lines() + .filter(|line| with.iter().all(|with| contains(with, line))) + .filter(|line| !without.iter().any(|without| contains(without, line))) + .collect(); + match matches.len() { + 0 => Err(format!( + "Could not find expected line in output.\n\ + With contents: {:?}\n\ + Without contents: {:?}\n\ + Actual stderr:\n\ + {}\n", + with, without, actual + )), + 1 => Ok(()), + _ => Err(format!( + "Found multiple matching lines, but only expected one.\n\ + With contents: {:?}\n\ + Without contents: {:?}\n\ + Matching lines:\n\ + {}\n", + with, + 
without, + matches.join("\n") + )), + } + } + + fn match_json(&self, expected: &str, line: &str) -> MatchResult { + let expected = self.normalize_matcher(expected); + let line = self.normalize_matcher(line); + let actual = match line.parse() { + Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)), + Ok(actual) => actual, + }; + let expected = match expected.parse() { + Err(e) => return Err(format!("invalid json, {}:\n`{}`", e, line)), + Ok(expected) => expected, + }; + + find_json_mismatch(&expected, &actual) + } + + fn diff_lines<'a>( + &self, + actual: str::Lines<'a>, + expected: str::Lines<'a>, + partial: bool, + ) -> Vec { + let actual = actual.take(if partial { + expected.clone().count() + } else { + usize::MAX + }); + zip_all(actual, expected) + .enumerate() + .filter_map(|(i, (a, e))| match (a, e) { + (Some(a), Some(e)) => { + if lines_match(e, a) { + None + } else { + Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a)) + } + } + (Some(a), None) => Some(format!("{:3} -\n + |{}|\n", i, a)), + (None, Some(e)) => Some(format!("{:3} - |{}|\n +\n", i, e)), + (None, None) => panic!("Cannot get here"), + }) + .collect() + } +} + +impl Drop for Execs { + fn drop(&mut self) { + if !self.ran && !std::thread::panicking() { + panic!("forgot to run this command"); + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +enum MatchKind { + Exact, + Partial, + PartialN(usize), + NotPresent, + Unordered, +} + +/// Compares a line with an expected pattern. +/// - Use `[..]` as a wildcard to match 0 or more characters on the same line +/// (similar to `.*` in a regex). +/// - Use `[EXE]` to optionally add `.exe` on Windows (empty string on other +/// platforms). +/// - There is a wide range of macros (such as `[COMPILING]` or `[WARNING]`) +/// to match cargo's "status" output and allows you to ignore the alignment. +/// See `substitute_macros` for a complete list of macros. +/// - `[ROOT]` the path to the test directory's root +/// - `[CWD]` is the working directory of the process that was run. +pub fn lines_match(expected: &str, mut actual: &str) -> bool { + let expected = substitute_macros(&expected); + for (i, part) in expected.split("[..]").enumerate() { + match actual.find(part) { + Some(j) => { + if i == 0 && j != 0 { + return false; + } + actual = &actual[j + part.len()..]; + } + None => return false, + } + } + actual.is_empty() || expected.ends_with("[..]") +} + +#[cargo_test] +fn lines_match_works() { + assert!(lines_match("a b", "a b")); + assert!(lines_match("a[..]b", "a b")); + assert!(lines_match("a[..]", "a b")); + assert!(lines_match("[..]", "a b")); + assert!(lines_match("[..]b", "a b")); + + assert!(!lines_match("[..]b", "c")); + assert!(!lines_match("b", "c")); + assert!(!lines_match("b", "cb")); +} + +/// Compares JSON object for approximate equality. +/// You can use `[..]` wildcard in strings (useful for OS-dependent things such +/// as paths). You can use a `"{...}"` string literal as a wildcard for +/// arbitrary nested JSON (useful for parts of object emitted by other programs +/// (e.g., rustc) rather than Cargo itself). Arrays are sorted before comparison. 
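Two supplementary assertions (in the same spirit as `lines_match_works` above) that pin down the wildcard rules before the JSON comparison helper below; the concrete `rustc` line is invented for illustration:

```rust
// `[..]` spans any run of characters within one line, and status macros
// such as `[RUNNING]` expand to Cargo's right-aligned status prefix
// (`     Running`).
assert!(lines_match(
    "[RUNNING] `rustc [..]--crate-name foo[..]`",
    "     Running `rustc --edition=2018 --crate-name foo src/lib.rs`",
));
// The pattern must consume the entire actual line; unmatched trailing
// text fails unless the pattern ends with `[..]`.
assert!(!lines_match("a[..]b", "a b c"));
```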
+pub fn find_json_mismatch(expected: &Value, actual: &Value) -> Result<(), String> {
+    match find_json_mismatch_r(expected, actual) {
+        Some((expected_part, actual_part)) => Err(format!(
+            "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n",
+            serde_json::to_string_pretty(expected).unwrap(),
+            serde_json::to_string_pretty(actual).unwrap(),
+            serde_json::to_string_pretty(expected_part).unwrap(),
+            serde_json::to_string_pretty(actual_part).unwrap(),
+        )),
+        None => Ok(()),
+    }
+}
+
+fn find_json_mismatch_r<'a>(
+    expected: &'a Value,
+    actual: &'a Value,
+) -> Option<(&'a Value, &'a Value)> {
+    use serde_json::Value::*;
+    match (expected, actual) {
+        (&Number(ref l), &Number(ref r)) if l == r => None,
+        (&Bool(l), &Bool(r)) if l == r => None,
+        (&String(ref l), &String(ref r)) if lines_match(l, r) => None,
+        (&Array(ref l), &Array(ref r)) => {
+            if l.len() != r.len() {
+                return Some((expected, actual));
+            }
+
+            let mut l = l.iter().collect::<Vec<_>>();
+            let mut r = r.iter().collect::<Vec<_>>();
+
+            l.retain(
+                |l| match r.iter().position(|r| find_json_mismatch_r(l, r).is_none()) {
+                    Some(i) => {
+                        r.remove(i);
+                        false
+                    }
+                    None => true,
+                },
+            );
+
+            if !l.is_empty() {
+                assert!(!r.is_empty());
+                Some((l[0], r[0]))
+            } else {
+                assert_eq!(r.len(), 0);
+                None
+            }
+        }
+        (&Object(ref l), &Object(ref r)) => {
+            let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k));
+            if !same_keys {
+                return Some((expected, actual));
+            }
+
+            l.values()
+                .zip(r.values())
+                .filter_map(|(l, r)| find_json_mismatch_r(l, r))
+                .nth(0)
+        }
+        (&Null, &Null) => None,
+        // Magic string literal `"{...}"` acts as wildcard for any sub-JSON.
+        (&String(ref l), _) if l == "{...}" => None,
+        _ => Some((expected, actual)),
+    }
+}
+
+struct ZipAll<I1: Iterator, I2: Iterator> {
+    first: I1,
+    second: I2,
+}
+
+impl<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>> Iterator for ZipAll<I1, I2> {
+    type Item = (Option<T>, Option<T>);
+    fn next(&mut self) -> Option<(Option<T>, Option<T>)> {
+        let first = self.first.next();
+        let second = self.second.next();
+
+        match (first, second) {
+            (None, None) => None,
+            (a, b) => Some((a, b)),
+        }
+    }
+}
+
+fn zip_all<T, I1: Iterator<Item = T>, I2: Iterator<Item = T>>(a: I1, b: I2) -> ZipAll<I1, I2> {
+    ZipAll {
+        first: a,
+        second: b,
+    }
+}
+
+impl fmt::Debug for Execs {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "execs")
+    }
+}
+
+pub fn execs() -> Execs {
+    Execs {
+        ran: false,
+        process_builder: None,
+        expect_stdout: None,
+        expect_stderr: None,
+        expect_stdin: None,
+        expect_exit_code: Some(0),
+        expect_stdout_contains: Vec::new(),
+        expect_stderr_contains: Vec::new(),
+        expect_either_contains: Vec::new(),
+        expect_stdout_contains_n: Vec::new(),
+        expect_stdout_not_contains: Vec::new(),
+        expect_stderr_not_contains: Vec::new(),
+        expect_stderr_unordered: Vec::new(),
+        expect_neither_contains: Vec::new(),
+        expect_stderr_with_without: Vec::new(),
+        expect_json: None,
+        expect_json_contains_unordered: Vec::new(),
+        stream_output: false,
+    }
+}
+
+pub trait Tap {
+    fn tap<F: FnOnce(&mut Self)>(self, callback: F) -> Self;
+}
+
+impl<T> Tap for T {
+    fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> T {
+        callback(&mut self);
+        self
+    }
+}
+
+pub fn basic_manifest(name: &str, version: &str) -> String {
+    format!(
+        r#"
+        [package]
+        name = "{}"
+        version = "{}"
+        authors = []
+    "#,
+        name, version
+    )
+}
+
+pub fn basic_bin_manifest(name: &str) -> String {
+    format!(
+        r#"
+        [package]
+
+        name = "{}"
+        version = "0.5.0"
+        authors = ["wycats@example.com"]
+
+        [[bin]]
+
+        name = "{}"
+    "#,
+        name, name
+    )
+}
+
+pub fn basic_lib_manifest(name: &str) -> String {
+    format!(
+        r#"
+        [package]
+ + name = "{}" + version = "0.5.0" + authors = ["wycats@example.com"] + + [lib] + + name = "{}" + "#, + name, name + ) +} + +pub fn path2url>(p: P) -> Url { + Url::from_file_path(p).ok().unwrap() +} + +fn substitute_macros(input: &str) -> String { + let macros = [ + ("[RUNNING]", " Running"), + ("[COMPILING]", " Compiling"), + ("[CHECKING]", " Checking"), + ("[CREATED]", " Created"), + ("[FINISHED]", " Finished"), + ("[ERROR]", "error:"), + ("[WARNING]", "warning:"), + ("[DOCUMENTING]", " Documenting"), + ("[FRESH]", " Fresh"), + ("[UPDATING]", " Updating"), + ("[ADDING]", " Adding"), + ("[REMOVING]", " Removing"), + ("[DOCTEST]", " Doc-tests"), + ("[PACKAGING]", " Packaging"), + ("[DOWNLOADING]", " Downloading"), + ("[DOWNLOADED]", " Downloaded"), + ("[UPLOADING]", " Uploading"), + ("[VERIFYING]", " Verifying"), + ("[ARCHIVING]", " Archiving"), + ("[INSTALLING]", " Installing"), + ("[REPLACING]", " Replacing"), + ("[UNPACKING]", " Unpacking"), + ("[SUMMARY]", " Summary"), + ("[FIXING]", " Fixing"), + ("[EXE]", env::consts::EXE_SUFFIX), + ("[IGNORED]", " Ignored"), + ("[INSTALLED]", " Installed"), + ("[REPLACED]", " Replaced"), + ("[NOTE]", " Note"), + ]; + let mut result = input.to_owned(); + for &(pat, subst) in ¯os { + result = result.replace(pat, subst) + } + result +} + +pub mod install; + +thread_local!( +pub static RUSTC: Rustc = Rustc::new( + PathBuf::from("rustc"), + None, + Path::new("should be path to rustup rustc, but we don't care in tests"), + None, +).unwrap() +); + +/// The rustc host such as `x86_64-unknown-linux-gnu`. +pub fn rustc_host() -> String { + RUSTC.with(|r| r.host.clone()) +} + +pub fn is_nightly() -> bool { + env::var("CARGO_TEST_DISABLE_NIGHTLY").is_err() + && RUSTC + .with(|r| r.verbose_version.contains("-nightly") || r.verbose_version.contains("-dev")) +} + +pub fn process>(t: T) -> cargo::util::ProcessBuilder { + _process(t.as_ref()) +} + +fn _process(t: &OsStr) -> cargo::util::ProcessBuilder { + let mut p = cargo::util::process(t); + p.cwd(&paths::root()) + .env_remove("CARGO_HOME") + .env("HOME", paths::home()) + .env("CARGO_HOME", paths::home().join(".cargo")) + .env("__CARGO_TEST_ROOT", paths::root()) + // Force Cargo to think it's on the stable channel for all tests, this + // should hopefully not surprise us as we add cargo features over time and + // cargo rides the trains. + .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "stable") + // For now disable incremental by default as support hasn't ridden to the + // stable channel yet. Once incremental support hits the stable compiler we + // can switch this to one and then fix the tests. + .env("CARGO_INCREMENTAL", "0") + // This env var can switch the git backend from libgit2 to git2-curl, which + // can tweak error messages and cause some tests to fail, so let's forcibly + // remove it. 
+ .env_remove("CARGO_HTTP_CHECK_REVOKE") + .env_remove("__CARGO_DEFAULT_LIB_METADATA") + .env_remove("RUSTC") + .env_remove("RUSTDOC") + .env_remove("RUSTC_WRAPPER") + .env_remove("RUSTFLAGS") + .env_remove("XDG_CONFIG_HOME") // see #2345 + .env("GIT_CONFIG_NOSYSTEM", "1") // keep trying to sandbox ourselves + .env_remove("EMAIL") + .env_remove("USER") // not set on some rust-lang docker images + .env_remove("MFLAGS") + .env_remove("MAKEFLAGS") + .env_remove("CARGO_MAKEFLAGS") + .env_remove("GIT_AUTHOR_NAME") + .env_remove("GIT_AUTHOR_EMAIL") + .env_remove("GIT_COMMITTER_NAME") + .env_remove("GIT_COMMITTER_EMAIL") + .env_remove("CARGO_TARGET_DIR") // we assume 'target' + .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows + p +} + +pub trait ChannelChanger: Sized { + fn masquerade_as_nightly_cargo(&mut self) -> &mut Self; +} + +impl ChannelChanger for cargo::util::ProcessBuilder { + fn masquerade_as_nightly_cargo(&mut self) -> &mut Self { + self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") + } +} + +fn split_and_add_args(p: &mut ProcessBuilder, s: &str) { + for arg in s.split_whitespace() { + if arg.contains('"') || arg.contains('\'') { + panic!("shell-style argument parsing is not supported") + } + p.arg(arg); + } +} + +pub fn cargo_process(s: &str) -> Execs { + let mut p = process(&cargo_exe()); + split_and_add_args(&mut p, s); + execs().with_process_builder(p) +} + +pub fn git_process(s: &str) -> ProcessBuilder { + let mut p = process("git"); + split_and_add_args(&mut p, s); + p +} + +pub fn sleep_ms(ms: u64) { + ::std::thread::sleep(Duration::from_millis(ms)); +} + +/// Returns `true` if the local filesystem has low-resolution mtimes. +pub fn is_coarse_mtime() -> bool { + // If the filetime crate is being used to emulate HFS then + // return `true`, without looking at the actual hardware. + cfg!(emulate_second_only_system) || + // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS + // filesystem, (or any filesystem with low-resolution mtimes). However, + // that's tricky to detect, so for now just deal with CI. + cfg!(target_os = "macos") && is_ci() +} + +/// Some CI setups are much slower then the equipment used by Cargo itself. +/// Architectures that do not have a modern processor, hardware emulation, etc. +/// This provides a way for those setups to increase the cut off for all the time based test. +pub fn slow_cpu_multiplier(main: u64) -> Duration { + lazy_static::lazy_static! { + static ref SLOW_CPU_MULTIPLIER: u64 = + env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER").ok().and_then(|m| m.parse().ok()).unwrap_or(1); + } + Duration::from_secs(*SLOW_CPU_MULTIPLIER * main) +} + +pub fn clippy_is_available() -> bool { + if let Err(e) = process("clippy-driver").arg("-V").exec_with_output() { + eprintln!("clippy-driver not available, skipping clippy test"); + eprintln!("{:?}", e); + false + } else { + true + } +} + +#[cfg(windows)] +pub fn symlink_supported() -> bool { + if is_ci() { + // We want to be absolutely sure this runs on CI. 
+ return true; + } + let src = paths::root().join("symlink_src"); + fs::write(&src, "").unwrap(); + let dst = paths::root().join("symlink_dst"); + let result = match os::windows::fs::symlink_file(&src, &dst) { + Ok(_) => { + fs::remove_file(&dst).unwrap(); + true + } + Err(e) => { + eprintln!( + "symlinks not supported: {:?}\n\ + Windows 10 users should enable developer mode.", + e + ); + false + } + }; + fs::remove_file(&src).unwrap(); + return result; +} + +#[cfg(not(windows))] +pub fn symlink_supported() -> bool { + true +} diff --git a/tests/testsuite/support/paths.rs b/tests/testsuite/support/paths.rs new file mode 100644 index 00000000000..7dfb65c69a3 --- /dev/null +++ b/tests/testsuite/support/paths.rs @@ -0,0 +1,255 @@ +use std::cell::RefCell; +use std::collections::HashMap; +use std::env; +use std::fs; +use std::io::{self, ErrorKind}; +use std::path::{Path, PathBuf}; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::Mutex; + +use filetime::{self, FileTime}; +use lazy_static::lazy_static; + +static CARGO_INTEGRATION_TEST_DIR: &str = "cit"; + +lazy_static! { + static ref GLOBAL_ROOT: PathBuf = { + let mut path = t!(env::current_exe()); + path.pop(); // chop off exe name + path.pop(); // chop off 'debug' + + // If `cargo test` is run manually then our path looks like + // `target/debug/foo`, in which case our `path` is already pointing at + // `target`. If, however, `cargo test --target $target` is used then the + // output is `target/$target/debug/foo`, so our path is pointing at + // `target/$target`. Here we conditionally pop the `$target` name. + if path.file_name().and_then(|s| s.to_str()) != Some("target") { + path.pop(); + } + + path.push(CARGO_INTEGRATION_TEST_DIR); + path.mkdir_p(); + path + }; + + static ref TEST_ROOTS: Mutex> = Default::default(); +} + +// We need to give each test a unique id. The test name could serve this +// purpose, but the `test` crate doesn't have a way to obtain the current test +// name.[*] Instead, we used the `cargo-test-macro` crate to automatically +// insert an init function for each test that sets the test name in a thread +// local variable. +// +// [*] It does set the thread name, but only when running concurrently. If not +// running concurrently, all tests are run on the main thread. +thread_local! 
{ + static TEST_ID: RefCell> = RefCell::new(None); +} + +pub struct TestIdGuard { + _private: (), +} + +pub fn init_root() -> TestIdGuard { + static NEXT_ID: AtomicUsize = AtomicUsize::new(0); + + let id = NEXT_ID.fetch_add(1, Ordering::Relaxed); + TEST_ID.with(|n| *n.borrow_mut() = Some(id)); + + let guard = TestIdGuard { _private: () }; + + let r = root(); + r.rm_rf(); + r.mkdir_p(); + + guard +} + +impl Drop for TestIdGuard { + fn drop(&mut self) { + TEST_ID.with(|n| *n.borrow_mut() = None); + } +} + +pub fn root() -> PathBuf { + let id = TEST_ID.with(|n| { + n.borrow().expect( + "Tests must use the `#[cargo_test]` attribute in \ + order to be able to use the crate root.", + ) + }); + GLOBAL_ROOT.join(&format!("t{}", id)) +} + +pub fn home() -> PathBuf { + let mut path = root(); + path.push("home"); + path.mkdir_p(); + path +} + +pub trait CargoPathExt { + fn rm_rf(&self); + fn mkdir_p(&self); + + fn move_into_the_past(&self) { + self.move_in_time(|sec, nsec| (sec - 3600, nsec)) + } + + fn move_into_the_future(&self) { + self.move_in_time(|sec, nsec| (sec + 3600, nsec)) + } + + fn move_in_time(&self, travel_amount: F) + where + F: Fn(i64, u32) -> (i64, u32); + + fn is_symlink(&self) -> bool; +} + +impl CargoPathExt for Path { + /* Technically there is a potential race condition, but we don't + * care all that much for our tests + */ + fn rm_rf(&self) { + if self.exists() { + if let Err(e) = remove_dir_all::remove_dir_all(self) { + panic!("failed to remove {:?}: {:?}", self, e) + } + } + } + + fn mkdir_p(&self) { + fs::create_dir_all(self) + .unwrap_or_else(|e| panic!("failed to mkdir_p {}: {}", self.display(), e)) + } + + fn move_in_time(&self, travel_amount: F) + where + F: Fn(i64, u32) -> ((i64, u32)), + { + if self.is_file() { + time_travel(self, &travel_amount); + } else { + recurse(self, &self.join("target"), &travel_amount); + } + + fn recurse(p: &Path, bad: &Path, travel_amount: &F) + where + F: Fn(i64, u32) -> ((i64, u32)), + { + if p.is_file() { + time_travel(p, travel_amount) + } else if !p.starts_with(bad) { + for f in t!(fs::read_dir(p)) { + let f = t!(f).path(); + recurse(&f, bad, travel_amount); + } + } + } + + fn time_travel(path: &Path, travel_amount: &F) + where + F: Fn(i64, u32) -> ((i64, u32)), + { + let stat = t!(path.metadata()); + + let mtime = FileTime::from_last_modification_time(&stat); + + let (sec, nsec) = travel_amount(mtime.unix_seconds(), mtime.nanoseconds()); + let newtime = FileTime::from_unix_time(sec, nsec); + + // Sadly change_file_times has a failure mode where a readonly file + // cannot have its times changed on windows. 
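A sketch of why tests travel mtimes backwards at all: rewinding makes a subsequent edit look strictly newer, even on filesystems with one-second timestamps. The `project()` fixture and its `change_file` helper are assumed names used for illustration:

```rust
// Hypothetical freshness test.
let p = project().file("src/lib.rs", "").build();
p.cargo("build").run();
p.root().move_into_the_past(); // all outputs now appear an hour old
p.change_file("src/lib.rs", "pub fn touched() {}"); // hypothetical helper
p.cargo("build")
    .with_stderr_contains("[COMPILING] foo [..]")
    .run();
```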
+ do_op(path, "set file times", |path| { + filetime::set_file_times(path, newtime, newtime) + }); + } + } + + fn is_symlink(&self) -> bool { + fs::symlink_metadata(self) + .map(|m| m.file_type().is_symlink()) + .unwrap_or(false) + } +} + +fn do_op(path: &Path, desc: &str, mut f: F) +where + F: FnMut(&Path) -> io::Result<()>, +{ + match f(path) { + Ok(()) => {} + Err(ref e) if e.kind() == ErrorKind::PermissionDenied => { + let mut p = t!(path.metadata()).permissions(); + p.set_readonly(false); + t!(fs::set_permissions(path, p)); + + // Unix also requires the parent to not be readonly for example when + // removing files + let parent = path.parent().unwrap(); + let mut p = t!(parent.metadata()).permissions(); + p.set_readonly(false); + t!(fs::set_permissions(parent, p)); + + f(path).unwrap_or_else(|e| { + panic!("failed to {} {}: {}", desc, path.display(), e); + }) + } + Err(e) => { + panic!("failed to {} {}: {}", desc, path.display(), e); + } + } +} + +/// Get the filename for a library. +/// +/// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" +/// +/// For example, dynamic library named "foo" would return: +/// - macOS: "libfoo.dylib" +/// - Windows: "foo.dll" +/// - Unix: "libfoo.so" +pub fn get_lib_filename(name: &str, kind: &str) -> String { + let prefix = get_lib_prefix(kind); + let extension = get_lib_extension(kind); + format!("{}{}.{}", prefix, name, extension) +} + +pub fn get_lib_prefix(kind: &str) -> &str { + match kind { + "lib" | "rlib" => "lib", + "staticlib" | "dylib" | "proc-macro" => { + if cfg!(windows) { + "" + } else { + "lib" + } + } + _ => unreachable!(), + } +} + +pub fn get_lib_extension(kind: &str) -> &str { + match kind { + "lib" | "rlib" => "rlib", + "staticlib" => { + if cfg!(windows) { + "lib" + } else { + "a" + } + } + "dylib" | "proc-macro" => { + if cfg!(windows) { + "dll" + } else if cfg!(target_os = "macos") { + "dylib" + } else { + "so" + } + } + _ => unreachable!(), + } +} diff --git a/tests/testsuite/support/publish.rs b/tests/testsuite/support/publish.rs new file mode 100644 index 00000000000..0230aa63833 --- /dev/null +++ b/tests/testsuite/support/publish.rs @@ -0,0 +1,132 @@ +use std::collections::{HashMap, HashSet}; +use std::fs::File; +use std::io::{self, prelude::*, SeekFrom}; +use std::path::{Path, PathBuf}; + +use crate::support::find_json_mismatch; +use crate::support::registry::{self, alt_api_path}; + +use flate2::read::GzDecoder; +use tar::Archive; + +fn read_le_u32(mut reader: R) -> io::Result +where + R: Read, +{ + let mut buf = [0; 4]; + reader.read_exact(&mut buf)?; + Ok(u32::from_le_bytes(buf)) +} + +/// Checks the result of a crate publish. +pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) { + let new_path = registry::api_path().join("api/v1/crates/new"); + _validate_upload( + &new_path, + expected_json, + expected_crate_name, + expected_files, + ); +} + +/// Checks the result of a crate publish to an alternative registry. +pub fn validate_alt_upload( + expected_json: &str, + expected_crate_name: &str, + expected_files: &[&str], +) { + let new_path = alt_api_path().join("api/v1/crates/new"); + _validate_upload( + &new_path, + expected_json, + expected_crate_name, + expected_files, + ); +} + +fn _validate_upload( + new_path: &Path, + expected_json: &str, + expected_crate_name: &str, + expected_files: &[&str], +) { + let mut f = File::open(new_path).unwrap(); + // 32-bit little-endian integer of length of JSON data. 
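For reference, the upload body that the parsing code below walks through is framed as two length-prefixed segments; a short encoder sketch (a hypothetical helper mirroring the reads, not part of Cargo):

```rust
// Layout of the body Cargo writes to api/v1/crates/new:
//   [u32 LE: json_len][json_len bytes of JSON metadata]
//   [u32 LE: crate_len][crate_len bytes of the .crate tarball]
fn encode_publish_body(json: &[u8], krate: &[u8]) -> Vec<u8> {
    let mut body = Vec::with_capacity(8 + json.len() + krate.len());
    body.extend_from_slice(&(json.len() as u32).to_le_bytes());
    body.extend_from_slice(json);
    body.extend_from_slice(&(krate.len() as u32).to_le_bytes());
    body.extend_from_slice(krate);
    body
}
```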
+ let json_sz = read_le_u32(&mut f).expect("read json length"); + let mut json_bytes = vec![0; json_sz as usize]; + f.read_exact(&mut json_bytes).expect("read JSON data"); + let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid"); + let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse"); + find_json_mismatch(&expected_json, &actual_json) + .expect("uploaded JSON did not match expected JSON"); + + // 32-bit little-endian integer of length of crate file. + let crate_sz = read_le_u32(&mut f).expect("read crate length"); + let mut krate_bytes = vec![0; crate_sz as usize]; + f.read_exact(&mut krate_bytes).expect("read crate data"); + // Check at end. + let current = f.seek(SeekFrom::Current(0)).unwrap(); + assert_eq!(f.seek(SeekFrom::End(0)).unwrap(), current); + + // Verify the tarball. + validate_crate_contents(&krate_bytes[..], expected_crate_name, expected_files, &[]); +} + +/// Checks the contents of a `.crate` file. +/// +/// - `expected_crate_name` should be something like `foo-0.0.1.crate`. +/// - `expected_files` should be a complete list of files in the crate +/// (relative to expected_crate_name). +/// - `expected_contents` should be a list of `(file_name, contents)` tuples +/// to validate the contents of the given file. Only the listed files will +/// be checked (others will be ignored). +pub fn validate_crate_contents( + reader: impl Read, + expected_crate_name: &str, + expected_files: &[&str], + expected_contents: &[(&str, &str)], +) { + let mut rdr = GzDecoder::new(reader); + assert_eq!( + rdr.header().unwrap().filename().unwrap(), + expected_crate_name.as_bytes() + ); + let mut contents = Vec::new(); + rdr.read_to_end(&mut contents).unwrap(); + let mut ar = Archive::new(&contents[..]); + let files: HashMap = ar + .entries() + .unwrap() + .map(|entry| { + let mut entry = entry.unwrap(); + let name = entry.path().unwrap().into_owned(); + let mut contents = String::new(); + entry.read_to_string(&mut contents).unwrap(); + (name, contents) + }) + .collect(); + assert!(expected_crate_name.ends_with(".crate")); + let base_crate_name = Path::new(&expected_crate_name[..expected_crate_name.len() - 6]); + let actual_files: HashSet = files.keys().cloned().collect(); + let expected_files: HashSet = expected_files + .iter() + .map(|name| base_crate_name.join(name)) + .collect(); + let missing: Vec<&PathBuf> = expected_files.difference(&actual_files).collect(); + let extra: Vec<&PathBuf> = actual_files.difference(&expected_files).collect(); + if !missing.is_empty() || !extra.is_empty() { + panic!( + "uploaded archive does not match.\nMissing: {:?}\nExtra: {:?}\n", + missing, extra + ); + } + if !expected_contents.is_empty() { + for (e_file_name, e_file_contents) in expected_contents { + let full_e_name = base_crate_name.join(e_file_name); + let actual_contents = files + .get(&full_e_name) + .unwrap_or_else(|| panic!("file `{}` missing in archive", e_file_name)); + assert_eq!(actual_contents, e_file_contents); + } + } +} diff --git a/tests/testsuite/support/registry.rs b/tests/testsuite/support/registry.rs new file mode 100644 index 00000000000..709f748ce6a --- /dev/null +++ b/tests/testsuite/support/registry.rs @@ -0,0 +1,599 @@ +use std::collections::HashMap; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{Path, PathBuf}; + +use cargo::sources::CRATES_IO_INDEX; +use cargo::util::Sha256; +use flate2::write::GzEncoder; +use flate2::Compression; +use tar::{Builder, Header}; +use url::Url; + +use 
crate::support::git::repo; +use crate::support::paths; + +/// Gets the path to the local index pretending to be crates.io. This is a Git repo +/// initialized with a `config.json` file pointing to `dl_path` for downloads +/// and `api_path` for uploads. +pub fn registry_path() -> PathBuf { + paths::root().join("registry") +} +pub fn registry_url() -> Url { + Url::from_file_path(registry_path()).ok().unwrap() +} +/// Gets the path for local web API uploads. Cargo will place the contents of a web API +/// request here. For example, `api/v1/crates/new` is the result of publishing a crate. +pub fn api_path() -> PathBuf { + paths::root().join("api") +} +pub fn api_url() -> Url { + Url::from_file_path(api_path()).ok().unwrap() +} +/// Gets the path where crates can be downloaded using the web API endpoint. Crates +/// should be organized as `{name}/{version}/download` to match the web API +/// endpoint. This is rarely used and must be manually set up. +pub fn dl_path() -> PathBuf { + paths::root().join("dl") +} +pub fn dl_url() -> Url { + Url::from_file_path(dl_path()).ok().unwrap() +} +/// Gets the alternative-registry version of `registry_path`. +pub fn alt_registry_path() -> PathBuf { + paths::root().join("alternative-registry") +} +pub fn alt_registry_url() -> Url { + Url::from_file_path(alt_registry_path()).ok().unwrap() +} +/// Gets the alternative-registry version of `dl_path`. +pub fn alt_dl_path() -> PathBuf { + paths::root().join("alt_dl") +} +pub fn alt_dl_url() -> String { + let base = Url::from_file_path(alt_dl_path()).ok().unwrap(); + format!("{}/{{crate}}/{{version}}/{{crate}}-{{version}}.crate", base) +} +/// Gets the alternative-registry version of `api_path`. +pub fn alt_api_path() -> PathBuf { + paths::root().join("alt_api") +} +pub fn alt_api_url() -> Url { + Url::from_file_path(alt_api_path()).ok().unwrap() +} + +/// A builder for creating a new package in a registry. +/// +/// This uses "source replacement" using an automatically generated +/// `.cargo/config` file to ensure that dependencies will use these packages +/// instead of contacting crates.io. See `source-replacement.md` for more +/// details on how source replacement works. +/// +/// Call `publish` to finalize and create the package. +/// +/// If no files are specified, an empty `lib.rs` file is automatically created. +/// +/// The `Cargo.toml` file is automatically generated based on the methods +/// called on `Package` (for example, calling `dep()` will add to the +/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file +/// to override the generated one. +/// +/// This supports different registry types: +/// - Regular source replacement that replaces `crates.io` (the default). +/// - A "local registry" which is a subset for vendoring (see +/// `Package::local`). +/// - An "alternative registry" which requires specifying the registry name +/// (see `Package::alternative`). +/// +/// This does not support "directory sources". See `directory.rs` for +/// `VendorPackage` which implements directory sources. +/// +/// # Example +/// ``` +/// // Publish package "a" depending on "b". +/// Package::new("a", "1.0.0") +/// .dep("b", "1.0.0") +/// .file("src/lib.rs", r#" +/// extern crate b; +/// pub fn f() -> i32 { b::f() * 2 } +/// "#) +/// .publish(); +/// +/// // Publish package "b". +/// Package::new("b", "1.0.0") +/// .file("src/lib.rs", r#" +/// pub fn f() -> i32 { 12 } +/// "#) +/// .publish(); +/// +/// // Create a project that uses package "a". 
+/// let p = project() +/// .file("Cargo.toml", r#" +/// [package] +/// name = "foo" +/// version = "0.0.1" +/// +/// [dependencies] +/// a = "1.0" +/// "#) +/// .file("src/main.rs", r#" +/// extern crate a; +/// fn main() { println!("{}", a::f()); } +/// "#) +/// .build(); +/// +/// p.cargo("run").with_stdout("24").run(); +/// ``` +#[must_use] +pub struct Package { + name: String, + vers: String, + deps: Vec, + files: Vec<(String, String)>, + extra_files: Vec<(String, String)>, + yanked: bool, + features: HashMap>, + local: bool, + alternative: bool, + invalid_json: bool, +} + +#[derive(Clone)] +pub struct Dependency { + name: String, + vers: String, + kind: String, + target: Option, + features: Vec, + registry: Option, + package: Option, + optional: bool, +} + +pub fn init() { + let config = paths::home().join(".cargo/config"); + t!(fs::create_dir_all(config.parent().unwrap())); + if fs::metadata(&config).is_ok() { + return; + } + t!(t!(File::create(&config)).write_all( + format!( + r#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'dummy-registry' + + [source.dummy-registry] + registry = '{reg}' + + [registries.alternative] + index = '{alt}' + "#, + reg = registry_url(), + alt = alt_registry_url() + ) + .as_bytes() + )); + let credentials = paths::home().join(".cargo/credentials"); + t!(t!(File::create(&credentials)).write_all( + br#" + [registry] + token = "api-token" + + [registries.alternative] + token = "api-token" + "# + )); + + // Initialize a new registry. + let _ = repo(®istry_path()) + .file( + "config.json", + &format!( + r#" + {{"dl":"{}","api":"{}"}} + "#, + dl_url(), + api_url() + ), + ) + .build(); + fs::create_dir_all(api_path().join("api/v1/crates")).unwrap(); + + // Initialize an alternative registry. + repo(&alt_registry_path()) + .file( + "config.json", + &format!( + r#" + {{"dl":"{}","api":"{}"}} + "#, + alt_dl_url(), + alt_api_url() + ), + ) + .build(); + fs::create_dir_all(alt_api_path().join("api/v1/crates")).unwrap(); +} + +impl Package { + /// Creates a new package builder. + /// Call `publish()` to finalize and build the package. + pub fn new(name: &str, vers: &str) -> Package { + init(); + Package { + name: name.to_string(), + vers: vers.to_string(), + deps: Vec::new(), + files: Vec::new(), + extra_files: Vec::new(), + yanked: false, + features: HashMap::new(), + local: false, + alternative: false, + invalid_json: false, + } + } + + /// Call with `true` to publish in a "local registry". + /// + /// See `source-replacement.html#local-registry-sources` for more details + /// on local registries. See `local_registry.rs` for the tests that use + /// this. + pub fn local(&mut self, local: bool) -> &mut Package { + self.local = local; + self + } + + /// Call with `true` to publish in an "alternative registry". + /// + /// The name of the alternative registry is called "alternative". + /// + /// See `src/doc/src/reference/registries.md` for more details on + /// alternative registries. See `alt_registry.rs` for the tests that use + /// this. + pub fn alternative(&mut self, alternative: bool) -> &mut Package { + self.alternative = alternative; + self + } + + /// Adds a file to the package. + pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { + self.files.push((name.to_string(), contents.to_string())); + self + } + + /// Adds an "extra" file that is not rooted within the package. + /// + /// Normal files are automatically placed within a directory named + /// `$PACKAGE-$VERSION`. 
This allows you to override that behavior, + /// typically for testing invalid behavior. + pub fn extra_file(&mut self, name: &str, contents: &str) -> &mut Package { + self.extra_files + .push((name.to_string(), contents.to_string())); + self + } + + /// Adds a normal dependency. Example: + /// ``` + /// [dependencies] + /// foo = {version = "1.0"} + /// ``` + pub fn dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.add_dep(&Dependency::new(name, vers)) + } + + /// Adds a dependency with the given feature. Example: + /// ``` + /// [dependencies] + /// foo = {version = "1.0", "features": ["feat1", "feat2"]} + /// ``` + pub fn feature_dep(&mut self, name: &str, vers: &str, features: &[&str]) -> &mut Package { + self.add_dep(Dependency::new(name, vers).enable_features(features)) + } + + /// Adds a platform-specific dependency. Example: + /// ``` + /// [target.'cfg(windows)'.dependencies] + /// foo = {version = "1.0"} + /// ``` + pub fn target_dep(&mut self, name: &str, vers: &str, target: &str) -> &mut Package { + self.add_dep(Dependency::new(name, vers).target(target)) + } + + /// Adds a dependency to the alternative registry. + pub fn registry_dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.add_dep(Dependency::new(name, vers).registry("alternative")) + } + + /// Adds a dev-dependency. Example: + /// ``` + /// [dev-dependencies] + /// foo = {version = "1.0"} + /// ``` + pub fn dev_dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.add_dep(Dependency::new(name, vers).dev()) + } + + /// Adds a build-dependency. Example: + /// ``` + /// [build-dependencies] + /// foo = {version = "1.0"} + /// ``` + pub fn build_dep(&mut self, name: &str, vers: &str) -> &mut Package { + self.add_dep(Dependency::new(name, vers).build()) + } + + pub fn add_dep(&mut self, dep: &Dependency) -> &mut Package { + self.deps.push(dep.clone()); + self + } + + /// Specifies whether or not the package is "yanked". + pub fn yanked(&mut self, yanked: bool) -> &mut Package { + self.yanked = yanked; + self + } + + /// Adds an entry in the `[features]` section. + pub fn feature(&mut self, name: &str, deps: &[&str]) -> &mut Package { + let deps = deps.iter().map(|s| s.to_string()).collect(); + self.features.insert(name.to_string(), deps); + self + } + + /// Causes the JSON line emitted in the index to be invalid, presumably + /// causing Cargo to skip over this version. + pub fn invalid_json(&mut self, invalid: bool) -> &mut Package { + self.invalid_json = invalid; + self + } + + /// Creates the package and place it in the registry. + /// + /// This does not actually use Cargo's publishing system, but instead + /// manually creates the entry in the registry on the filesystem. + /// + /// Returns the checksum for the package. + pub fn publish(&self) -> String { + self.make_archive(); + + // Figure out what we're going to write into the index. + let deps = self + .deps + .iter() + .map(|dep| { + // In the index, the `registry` is null if it is from the same registry. + // In Cargo.toml, it is None if it is from crates.io. 
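To make this concrete, a sketch of the index line that `publish()` ends up writing for a package with one normal dependency (checksum abbreviated; this mirrors, and does not replace, the `serde_json::json!` call that follows), together with the name-length sharding used for the index path further below:

```rust
// Index entry for `Package::new("foo", "0.0.1").dep("bar", "1.0")`:
let line = serde_json::json!({
    "name": "foo",
    "vers": "0.0.1",
    "deps": [{
        "name": "bar",
        "req": "1.0",
        "features": [],
        "default_features": true,
        "target": null,
        "optional": false,
        "kind": "normal",
        "registry": null,
        "package": null,
    }],
    "cksum": "e3b0c442[..]", // sha256 of the .crate file (abbreviated)
    "features": {},
    "yanked": false,
})
.to_string();

// The index path is sharded by name length, matching the code below:
//   "a"     -> 1/a
//   "ab"    -> 2/ab
//   "abc"   -> 3/a/abc
//   "serde" -> se/rd/serde
```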
+ let registry_url = + match (self.alternative, dep.registry.as_ref().map(|s| s.as_ref())) { + (false, None) => None, + (false, Some("alternative")) => Some(alt_registry_url().to_string()), + (true, None) => Some(CRATES_IO_INDEX.to_string()), + (true, Some("alternative")) => None, + _ => panic!("registry_dep currently only supports `alternative`"), + }; + serde_json::json!({ + "name": dep.name, + "req": dep.vers, + "features": dep.features, + "default_features": true, + "target": dep.target, + "optional": dep.optional, + "kind": dep.kind, + "registry": registry_url, + "package": dep.package, + }) + }) + .collect::>(); + let cksum = { + let mut c = Vec::new(); + t!(t!(File::open(&self.archive_dst())).read_to_end(&mut c)); + cksum(&c) + }; + let name = if self.invalid_json { + serde_json::json!(1) + } else { + serde_json::json!(self.name) + }; + let line = serde_json::json!({ + "name": name, + "vers": self.vers, + "deps": deps, + "cksum": cksum, + "features": self.features, + "yanked": self.yanked, + }) + .to_string(); + + let file = match self.name.len() { + 1 => format!("1/{}", self.name), + 2 => format!("2/{}", self.name), + 3 => format!("3/{}/{}", &self.name[..1], self.name), + _ => format!("{}/{}/{}", &self.name[0..2], &self.name[2..4], self.name), + }; + + let registry_path = if self.alternative { + alt_registry_path() + } else { + registry_path() + }; + + // Write file/line in the index. + let dst = if self.local { + registry_path.join("index").join(&file) + } else { + registry_path.join(&file) + }; + let mut prev = String::new(); + let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); + t!(fs::create_dir_all(dst.parent().unwrap())); + t!(t!(File::create(&dst)).write_all((prev + &line[..] + "\n").as_bytes())); + + // Add the new file to the index. + if !self.local { + let repo = t!(git2::Repository::open(®istry_path)); + let mut index = t!(repo.index()); + t!(index.add_path(Path::new(&file))); + t!(index.write()); + let id = t!(index.write_tree()); + + // Commit this change. + let tree = t!(repo.find_tree(id)); + let sig = t!(repo.signature()); + let parent = t!(repo.refname_to_id("refs/heads/master")); + let parent = t!(repo.find_commit(parent)); + t!(repo.commit( + Some("HEAD"), + &sig, + &sig, + "Another commit", + &tree, + &[&parent] + )); + } + + cksum + } + + fn make_archive(&self) { + let mut manifest = format!( + r#" + [package] + name = "{}" + version = "{}" + authors = [] + "#, + self.name, self.vers + ); + for dep in self.deps.iter() { + let target = match dep.target { + None => String::new(), + Some(ref s) => format!("target.'{}'.", s), + }; + let kind = match &dep.kind[..] 
{ + "build" => "build-", + "dev" => "dev-", + _ => "", + }; + manifest.push_str(&format!( + r#" + [{}{}dependencies.{}] + version = "{}" + "#, + target, kind, dep.name, dep.vers + )); + if let Some(registry) = &dep.registry { + assert_eq!(registry, "alternative"); + manifest.push_str(&format!("registry-index = \"{}\"", alt_registry_url())); + } + } + + let dst = self.archive_dst(); + t!(fs::create_dir_all(dst.parent().unwrap())); + let f = t!(File::create(&dst)); + let mut a = Builder::new(GzEncoder::new(f, Compression::default())); + self.append(&mut a, "Cargo.toml", &manifest); + if self.files.is_empty() { + self.append(&mut a, "src/lib.rs", ""); + } else { + for &(ref name, ref contents) in self.files.iter() { + self.append(&mut a, name, contents); + } + } + for &(ref name, ref contents) in self.extra_files.iter() { + self.append_extra(&mut a, name, contents); + } + } + + fn append(&self, ar: &mut Builder, file: &str, contents: &str) { + self.append_extra( + ar, + &format!("{}-{}/{}", self.name, self.vers, file), + contents, + ); + } + + fn append_extra(&self, ar: &mut Builder, path: &str, contents: &str) { + let mut header = Header::new_ustar(); + header.set_size(contents.len() as u64); + t!(header.set_path(path)); + header.set_cksum(); + t!(ar.append(&header, contents.as_bytes())); + } + + /// Returns the path to the compressed package file. + pub fn archive_dst(&self) -> PathBuf { + if self.local { + registry_path().join(format!("{}-{}.crate", self.name, self.vers)) + } else if self.alternative { + alt_dl_path() + .join(&self.name) + .join(&self.vers) + .join(&format!("{}-{}.crate", self.name, self.vers)) + } else { + dl_path().join(&self.name).join(&self.vers).join("download") + } + } +} + +pub fn cksum(s: &[u8]) -> String { + Sha256::new().update(s).finish_hex() +} + +impl Dependency { + pub fn new(name: &str, vers: &str) -> Dependency { + Dependency { + name: name.to_string(), + vers: vers.to_string(), + kind: "normal".to_string(), + target: None, + features: Vec::new(), + package: None, + optional: false, + registry: None, + } + } + + /// Changes this to `[build-dependencies]`. + pub fn build(&mut self) -> &mut Self { + self.kind = "build".to_string(); + self + } + + /// Changes this to `[dev-dependencies]`. + pub fn dev(&mut self) -> &mut Self { + self.kind = "dev".to_string(); + self + } + + /// Changes this to `[target.$target.dependencies]`. + pub fn target(&mut self, target: &str) -> &mut Self { + self.target = Some(target.to_string()); + self + } + + /// Adds `registry = $registry` to this dependency. + pub fn registry(&mut self, registry: &str) -> &mut Self { + self.registry = Some(registry.to_string()); + self + } + + /// Adds `features = [ ... ]` to this dependency. + pub fn enable_features(&mut self, features: &[&str]) -> &mut Self { + self.features.extend(features.iter().map(|s| s.to_string())); + self + } + + /// Adds `package = ...` to this dependency. + pub fn package(&mut self, pkg: &str) -> &mut Self { + self.package = Some(pkg.to_string()); + self + } + + /// Changes this to an optional dependency. 
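A sketch composing these `Dependency` builders end to end; the crate names and version are illustrative:

```rust
Package::new("foo", "0.1.0")
    .add_dep(
        Dependency::new("bar", "1.0")
            .target("cfg(windows)") // only on Windows targets
            .optional(true)         // behind an implicit feature
            .package("bar-real"),   // real package `bar-real`, exposed as `bar`
    )
    .publish();
```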
+ pub fn optional(&mut self, optional: bool) -> &mut Self { + self.optional = optional; + self + } +} diff --git a/tests/testsuite/test.rs b/tests/testsuite/test.rs new file mode 100644 index 00000000000..3c6fb5314b8 --- /dev/null +++ b/tests/testsuite/test.rs @@ -0,0 +1,3660 @@ +use std::fs::File; +use std::io::prelude::*; + +use cargo; + +use crate::support::paths::CargoPathExt; +use crate::support::registry::Package; +use crate::support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, project}; +use crate::support::{rustc_host, sleep_ms}; + +#[cargo_test] +fn cargo_test_simple() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[test] + fn test_hello() { + assert_eq!(hello(), "hello") + }"#, + ) + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("hello\n").run(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("test test_hello ... ok") + .run(); +} + +#[cargo_test] +fn cargo_test_release() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + authors = [] + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + pub fn foo() { bar::bar(); } + + #[test] + fn test() { foo(); } + "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo; + + #[test] + fn test() { foo::foo(); } + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "pub fn bar() {}") + .build(); + + p.cargo("test -v --release") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[RUNNING] [..] -C opt-level=3 [..] +[COMPILING] foo v0.1.0 ([CWD]) +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[RUNNING] [..] -C opt-level=3 [..] +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `[..]target/release/deps/foo-[..][EXE]` +[RUNNING] `[..]target/release/deps/test-[..][EXE]` +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]lib.rs[..]`", + ) + .with_stdout_contains_n("test test ... ok", 2) + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn cargo_test_overflow_checks() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.5.0" + authors = [] + + [[bin]] + name = "foo" + + [profile.release] + overflow-checks = true + "#, + ) + .file( + "src/foo.rs", + r#" + use std::panic; + pub fn main() { + let r = panic::catch_unwind(|| { + [1, i32::max_value()].iter().sum::(); + }); + assert!(r.is_err()); + }"#, + ) + .build(); + + p.cargo("build --release").run(); + assert!(p.release_bin("foo").is_file()); + + p.process(&p.release_bin("foo")).with_stdout("").run(); +} + +#[cargo_test] +fn cargo_test_quiet_with_harness() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[test]] + name = "foo" + path = "src/foo.rs" + harness = true + "#, + ) + .file( + "src/foo.rs", + r#" + fn main() {} + #[test] fn test_hello() {} + "#, + ) + .build(); + + p.cargo("test -q") + .with_stdout( + " +running 1 test +. +test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + +", + ) + .with_stderr("") + .run(); +} + +#[cargo_test] +fn cargo_test_quiet_no_harness() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [[bin]] + name = "foo" + test = false + + [[test]] + name = "foo" + path = "src/main.rs" + harness = false + "#, + ) + .file( + "src/main.rs", + r#" + fn main() {} + #[test] fn test_hello() {} + "#, + ) + .build(); + + p.cargo("test -q").with_stdout("").with_stderr("").run(); +} + +#[cargo_test] +fn cargo_test_verbose() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn main() {} + #[test] fn test_hello() {} + "#, + ) + .build(); + + p.cargo("test -v hello") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc [..] src/main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[CWD]/target/debug/deps/foo-[..] hello` +", + ) + .with_stdout_contains("test test_hello ... ok") + .run(); +} + +#[cargo_test] +fn many_similar_names() { + let p = project() + .file( + "src/lib.rs", + " + pub fn foo() {} + #[test] fn lib_test() {} + ", + ) + .file( + "src/main.rs", + " + extern crate foo; + fn main() {} + #[test] fn bin_test() { foo::foo() } + ", + ) + .file( + "tests/foo.rs", + r#" + extern crate foo; + #[test] fn test_test() { foo::foo() } + "#, + ) + .build(); + + p.cargo("test -v") + .with_stdout_contains("test bin_test ... ok") + .with_stdout_contains("test lib_test ... ok") + .with_stdout_contains("test test_test ... ok") + .run(); +} + +#[cargo_test] +fn cargo_test_failing_test_in_bin() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file( + "src/main.rs", + r#" + fn hello() -> &'static str { + "hello" + } + + pub fn main() { + println!("{}", hello()) + } + + #[test] + fn test_hello() { + assert_eq!(hello(), "nope") + }"#, + ) + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("hello\n").run(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[ERROR] test failed, to rerun pass '--bin foo'", + ) + .with_stdout_contains( + " +running 1 test +test test_hello ... FAILED + +failures: + +---- test_hello stdout ---- +[..]thread '[..]' panicked at 'assertion failed:[..]", + ) + .with_stdout_contains("[..]`(left == right)`[..]") + .with_stdout_contains("[..]left: `\"hello\"`,[..]") + .with_stdout_contains("[..]right: `\"nope\"`[..]") + .with_stdout_contains("[..]src/main.rs:12[..]") + .with_stdout_contains( + "\ +failures: + test_hello +", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn cargo_test_failing_test_in_test() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/main.rs", r#"pub fn main() { println!("hello"); }"#) + .file( + "tests/footest.rs", + "#[test] fn test_hello() { assert!(false) }", + ) + .build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + + p.process(&p.bin("foo")).with_stdout("hello\n").run(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/footest-[..][EXE] +[ERROR] test failed, to rerun pass '--test footest'", + ) + .with_stdout_contains("running 0 tests") + .with_stdout_contains( + "\ +running 1 test +test test_hello ... FAILED + +failures: + +---- test_hello stdout ---- +[..]thread '[..]' panicked at 'assertion failed: false', \ + tests/footest.rs:1[..] +", + ) + .with_stdout_contains( + "\ +failures: + test_hello +", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn cargo_test_failing_test_in_lib() { + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "#[test] fn test_hello() { assert!(false) }") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[ERROR] test failed, to rerun pass '--lib'", + ) + .with_stdout_contains( + "\ +test test_hello ... FAILED + +failures: + +---- test_hello stdout ---- +[..]thread '[..]' panicked at 'assertion failed: false', \ + src/lib.rs:1[..] +", + ) + .with_stdout_contains( + "\ +failures: + test_hello +", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn test_with_lib_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "baz" + path = "src/main.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + /// + /// ```rust + /// extern crate foo; + /// fn main() { + /// println!("{:?}", foo::foo()); + /// } + /// ``` + /// + pub fn foo(){} + #[test] fn lib_test() {} + "#, + ) + .file( + "src/main.rs", + " + #[allow(unused_extern_crates)] + extern crate foo; + + fn main() {} + + #[test] + fn bin_test() {} + ", + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/baz-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test lib_test ... ok") + .with_stdout_contains("test bin_test ... ok") + .with_stdout_contains_n("test [..] ... ok", 3) + .run(); +} + +#[cargo_test] +fn test_with_deep_lib_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.bar] + path = "../bar" + "#, + ) + .file( + "src/lib.rs", + " + #[cfg(test)] + extern crate bar; + /// ``` + /// foo::foo(); + /// ``` + pub fn foo() {} + + #[test] + fn bar_test() { + bar::bar(); + } + ", + ) + .build(); + let _p2 = project() + .at("bar") + .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("src/lib.rs", "pub fn bar() {} #[test] fn foo_test() {}") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([..]) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target[..] +[DOCTEST] foo", + ) + .with_stdout_contains("test bar_test ... ok") + .with_stdout_contains_n("test [..] ... 
ok", 2) + .run(); +} + +#[cargo_test] +fn external_test_explicit() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[test]] + name = "test" + path = "src/test.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn get_hello() -> &'static str { "Hello" } + + #[test] + fn internal_test() {} + "#, + ) + .file( + "src/test.rs", + r#" + extern crate foo; + + #[test] + fn external_test() { assert_eq!(foo::get_hello(), "Hello") } + "#, + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/test-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test internal_test ... ok") + .with_stdout_contains("test external_test ... ok") + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn external_test_named_test() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [[test]] + name = "test" + "#, + ) + .file("src/lib.rs", "") + .file("tests/test.rs", "#[test] fn foo() {}") + .build(); + + p.cargo("test").run(); +} + +#[cargo_test] +fn external_test_implicit() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn get_hello() -> &'static str { "Hello" } + + #[test] + fn internal_test() {} + "#, + ) + .file( + "tests/external.rs", + r#" + extern crate foo; + + #[test] + fn external_test() { assert_eq!(foo::get_hello(), "Hello") } + "#, + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/external-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test internal_test ... ok") + .with_stdout_contains("test external_test ... ok") + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn dont_run_examples() { + let p = project() + .file("src/lib.rs", "") + .file( + "examples/dont-run-me-i-will-fail.rs", + r#" + fn main() { panic!("Examples should not be run by 'cargo test'"); } + "#, + ) + .build(); + p.cargo("test").run(); +} + +#[cargo_test] +fn pass_through_command_line() { + let p = project() + .file( + "src/lib.rs", + " + #[test] fn foo() {} + #[test] fn bar() {} + ", + ) + .build(); + + p.cargo("test bar") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +", + ) + .with_stdout_contains("running 1 test") + .with_stdout_contains("test bar ... ok") + .run(); + + p.cargo("test foo") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +", + ) + .with_stdout_contains("running 1 test") + .with_stdout_contains("test foo ... 
ok") + .run(); +} + +// Regression test for running cargo-test twice with +// tests in an rlib +#[cargo_test] +fn cargo_test_twice() { + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file( + "src/foo.rs", + r#" + #![crate_type = "rlib"] + + #[test] + fn dummy_test() { } + "#, + ) + .build(); + + for _ in 0..2 { + p.cargo("test").run(); + } +} + +#[cargo_test] +fn lib_bin_same_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + [[bin]] + name = "foo" + "#, + ) + .file("src/lib.rs", "#[test] fn lib_test() {}") + .file( + "src/main.rs", + " + #[allow(unused_extern_crates)] + extern crate foo; + + #[test] + fn bin_test() {} + ", + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/foo-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains_n("test [..] ... ok", 2) + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn lib_with_standard_name() { + let p = project() + .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) + .file( + "src/lib.rs", + " + /// ``` + /// syntax::foo(); + /// ``` + pub fn foo() {} + + #[test] + fn foo_test() {} + ", + ) + .file( + "tests/test.rs", + " + extern crate syntax; + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] syntax v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/syntax-[..][EXE] +[RUNNING] target/debug/deps/test-[..][EXE] +[DOCTEST] syntax", + ) + .with_stdout_contains("test foo_test ... ok") + .with_stdout_contains("test test ... ok") + .with_stdout_contains_n("test [..] ... ok", 3) + .run(); +} + +#[cargo_test] +fn lib_with_standard_name2() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + name = "syntax" + test = false + doctest = false + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] syntax v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/syntax-[..][EXE]", + ) + .with_stdout_contains("test test ... ok") + .run(); +} + +#[cargo_test] +fn lib_without_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] syntax v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/syntax-[..][EXE]", + ) + .with_stdout_contains("test test ... 
ok") + .run(); +} + +#[cargo_test] +fn bin_without_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[bin]] + path = "src/main.rs" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .build(); + + p.cargo("test") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + binary target bin.name is required", + ) + .run(); +} + +#[cargo_test] +fn bench_without_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[bench]] + path = "src/bench.rs" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .file( + "src/bench.rs", + " + #![feature(test)] + extern crate syntax; + extern crate test; + + #[bench] + fn external_bench(_b: &mut test::Bencher) {} + ", + ) + .build(); + + p.cargo("test") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + benchmark target bench.name is required", + ) + .run(); +} + +#[cargo_test] +fn test_without_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[test]] + path = "src/test.rs" + "#, + ) + .file( + "src/lib.rs", + r#" + pub fn foo() {} + pub fn get_hello() -> &'static str { "Hello" } + "#, + ) + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .file( + "src/test.rs", + r#" + extern crate syntax; + + #[test] + fn external_test() { assert_eq!(syntax::get_hello(), "Hello") } + "#, + ) + .build(); + + p.cargo("test") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + test target test.name is required", + ) + .run(); +} + +#[cargo_test] +fn example_without_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "syntax" + version = "0.0.1" + authors = [] + + [lib] + test = false + doctest = false + + [[example]] + path = "examples/example.rs" + "#, + ) + .file("src/lib.rs", "pub fn foo() {}") + .file( + "src/main.rs", + " + extern crate syntax; + + fn main() {} + + #[test] + fn test() { syntax::foo() } + ", + ) + .file( + "examples/example.rs", + r#" + extern crate syntax; + + fn main() { + println!("example1"); + } + "#, + ) + .build(); + + p.cargo("test") + .with_status(101) + .with_stderr( + "\ +[ERROR] failed to parse manifest at `[..]` + +Caused by: + example target example.name is required", + ) + .run(); +} + +#[cargo_test] +fn bin_there_for_integration() { + let p = project() + .file( + "src/main.rs", + " + fn main() { std::process::exit(101); } + #[test] fn main_test() {} + ", + ) + .file( + "tests/foo.rs", + r#" + use std::process::Command; + #[test] + fn test_test() { + let status = Command::new("target/debug/foo").status().unwrap(); + assert_eq!(status.code(), Some(101)); + } + "#, + ) + .build(); + + p.cargo("test -v") + .with_stdout_contains("test main_test ... ok") + .with_stdout_contains("test test_test ... 
ok") + .run(); +} + +#[cargo_test] +fn test_dylib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate_type = ["dylib"] + + [dependencies.bar] + path = "bar" + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar as the_bar; + + pub fn bar() { the_bar::baz(); } + + #[test] + fn foo() { bar(); } + "#, + ) + .file( + "tests/test.rs", + r#" + extern crate foo as the_foo; + + #[test] + fn foo() { the_foo::bar(); } + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [lib] + name = "bar" + crate_type = ["dylib"] + "#, + ) + .file("bar/src/lib.rs", "pub fn baz() {}") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] bar v0.0.1 ([CWD]/bar) +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/test-[..][EXE]", + ) + .with_stdout_contains_n("test foo ... ok", 2) + .run(); + + p.root().move_into_the_past(); + p.cargo("test") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/test-[..][EXE]", + ) + .with_stdout_contains_n("test foo ... ok", 2) + .run(); +} + +#[cargo_test] +fn test_twice_with_build_cmd() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "#[test] fn foo() {}") + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests") + .run(); + + p.cargo("test") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn test_then_build() { + let p = project().file("src/lib.rs", "#[test] fn foo() {}").build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test foo ... ok") + .with_stdout_contains("running 0 tests") + .run(); + + p.cargo("build").with_stdout("").run(); +} + +#[cargo_test] +fn test_no_run() { + let p = project() + .file("src/lib.rs", "#[test] fn foo() { panic!() }") + .build(); + + p.cargo("test --no-run") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn test_run_specific_bin_target() { + let prj = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="bin1" + path="src/bin1.rs" + + [[bin]] + name="bin2" + path="src/bin2.rs" + "#, + ) + .file("src/bin1.rs", "#[test] fn test1() { }") + .file("src/bin2.rs", "#[test] fn test2() { }") + .build(); + + prj.cargo("test --bin bin2") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/bin2-[..][EXE]", + ) + .with_stdout_contains("test test2 ... 
ok") + .run(); +} + +#[cargo_test] +fn test_run_implicit_bin_target() { + let prj = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm.rs", + "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .build(); + + prj.cargo("test --bins") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/mybin-[..][EXE]", + ) + .with_stdout_contains("test test_in_bin ... ok") + .run(); +} + +#[cargo_test] +fn test_run_specific_test_target() { + let prj = project() + .file("src/bin/a.rs", "fn main() { }") + .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }") + .file("tests/a.rs", "#[test] fn test_a() { }") + .file("tests/b.rs", "#[test] fn test_b() { }") + .build(); + + prj.cargo("test --test b") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/b-[..][EXE]", + ) + .with_stdout_contains("test test_b ... ok") + .run(); +} + +#[cargo_test] +fn test_run_implicit_test_target() { + let prj = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm.rs", + "fn main() { compile_error!(\"Don't build me!\"); }", + ) + .build(); + + prj.cargo("test --tests") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/mybin-[..][EXE] +[RUNNING] target/debug/deps/mytest-[..][EXE]", + ) + .with_stdout_contains("test test_in_test ... ok") + .run(); +} + +#[cargo_test] +fn test_run_implicit_bench_target() { + let prj = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name="mybin" + path="src/mybin.rs" + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm.rs", + "fn main() { compile_error!(\"Don't build me!\"); }", + ) + .build(); + + prj.cargo("test --benches") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/mybin-[..][EXE] +[RUNNING] target/debug/deps/mybench-[..][EXE]", + ) + .with_stdout_contains("test test_in_bench ... 
ok") + .run(); +} + +#[cargo_test] +fn test_run_implicit_example_target() { + let prj = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "mybin" + path = "src/mybin.rs" + + [[example]] + name = "myexm1" + + [[example]] + name = "myexm2" + test = true + "#, + ) + .file( + "src/mybin.rs", + "#[test] fn test_in_bin() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file("tests/mytest.rs", "#[test] fn test_in_test() { }") + .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") + .file( + "examples/myexm1.rs", + "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .file( + "examples/myexm2.rs", + "#[test] fn test_in_exm() { } + fn main() { panic!(\"Don't execute me!\"); }", + ) + .build(); + + // Compiles myexm1 as normal, but does not run it. + prj.cargo("test -v") + .with_stderr_contains("[RUNNING] `rustc [..]myexm1.rs [..]--crate-type bin[..]") + .with_stderr_contains("[RUNNING] `rustc [..]myexm2.rs [..]--test[..]") + .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]") + .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") + .run(); + + // Only tests myexm2. + prj.cargo("test --tests") + .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]") + .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") + .run(); + + // Tests all examples. + prj.cargo("test --examples") + .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") + .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") + .run(); + + // Test an example, even without `test` set. + prj.cargo("test --example myexm1") + .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") + .run(); + + // Tests all examples. + prj.cargo("test --all-targets") + .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") + .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") + .run(); +} + +#[cargo_test] +fn test_filtered_excludes_compiling_examples() { + let p = project() + .file( + "src/lib.rs", + "#[cfg(test)] mod tests { #[test] fn foo() { assert!(true); } }", + ) + .file("examples/ex1.rs", "fn main() {}") + .build(); + + p.cargo("test -v foo") + .with_stdout( + " +running 1 test +test tests::foo ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + +", + ) + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[CWD]/target/debug/deps/foo-[..] foo` +", + ) + .with_stderr_does_not_contain("[RUNNING][..]rustc[..]ex1[..]") + .run(); +} + +#[cargo_test] +fn test_no_harness() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [[bin]] + name = "foo" + test = false + + [[test]] + name = "bar" + path = "foo.rs" + harness = false + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("foo.rs", "fn main() {}") + .build(); + + p.cargo("test -- --nocapture") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target/debug/deps/bar-[..][EXE] +", + ) + .run(); +} + +#[cargo_test] +fn selective_testing() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [lib] + name = "foo" + doctest = false + "#, + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + doctest = false + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d1/src/main.rs", + "#[allow(unused_extern_crates)] extern crate d1; fn main() {}", + ) + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [lib] + name = "d2" + doctest = false + "#, + ) + .file("d2/src/lib.rs", "") + .file( + "d2/src/main.rs", + "#[allow(unused_extern_crates)] extern crate d2; fn main() {}", + ); + let p = p.build(); + + println!("d1"); + p.cargo("test -p d1") + .with_stderr( + "\ +[COMPILING] d1 v0.0.1 ([CWD]/d1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/d1-[..][EXE] +[RUNNING] target/debug/deps/d1-[..][EXE]", + ) + .with_stdout_contains_n("running 0 tests", 2) + .run(); + + println!("d2"); + p.cargo("test -p d2") + .with_stderr( + "\ +[COMPILING] d2 v0.0.1 ([CWD]/d2) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/d2-[..][EXE] +[RUNNING] target/debug/deps/d2-[..][EXE]", + ) + .with_stdout_contains_n("running 0 tests", 2) + .run(); + + println!("whole"); + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn almost_cyclic_but_not_quite() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies.b] + path = "b" + [dev-dependencies.c] + path = "c" + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(test)] extern crate b; + #[cfg(test)] extern crate c; + "#, + ) + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + authors = [] + + [dependencies.foo] + path = ".." 
+ "#, + ) + .file( + "b/src/lib.rs", + r#" + #[allow(unused_extern_crates)] + extern crate foo; + "#, + ) + .file("c/Cargo.toml", &basic_manifest("c", "0.0.1")) + .file("c/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.cargo("test").run(); +} + +#[cargo_test] +fn build_then_selective_test() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.b] + path = "b" + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate b;", + ) + .file( + "src/main.rs", + r#" + #[allow(unused_extern_crates)] + extern crate b; + #[allow(unused_extern_crates)] + extern crate foo; + fn main() {} + "#, + ) + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + p.root().move_into_the_past(); + p.cargo("test -p b").run(); +} + +#[cargo_test] +fn example_dev_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies.bar] + path = "bar" + "#, + ) + .file("src/lib.rs", "") + .file("examples/e1.rs", "extern crate bar; fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file( + "bar/src/lib.rs", + r#" + // make sure this file takes awhile to compile + macro_rules! f0( () => (1) ); + macro_rules! f1( () => ({(f0!()) + (f0!())}) ); + macro_rules! f2( () => ({(f1!()) + (f1!())}) ); + macro_rules! f3( () => ({(f2!()) + (f2!())}) ); + macro_rules! f4( () => ({(f3!()) + (f3!())}) ); + macro_rules! f5( () => ({(f4!()) + (f4!())}) ); + macro_rules! f6( () => ({(f5!()) + (f5!())}) ); + macro_rules! f7( () => ({(f6!()) + (f6!())}) ); + macro_rules! f8( () => ({(f7!()) + (f7!())}) ); + pub fn bar() { + f8!(); + } + "#, + ) + .build(); + p.cargo("test").run(); + p.cargo("run --example e1 --release -v").run(); +} + +#[cargo_test] +fn selective_testing_with_docs() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// not valid rust + /// ``` + pub fn foo() {} + "#, + ) + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + path = "d1.rs" + "#, + ) + .file("d1/d1.rs", ""); + let p = p.build(); + + p.cargo("test -p d1") + .with_stderr( + "\ +[COMPILING] d1 v0.0.1 ([CWD]/d1) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/d1[..][EXE] +[DOCTEST] d1", + ) + .with_stdout_contains_n("running 0 tests", 2) + .run(); +} + +#[cargo_test] +fn example_bin_same_name() { + let p = project() + .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) + .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) + .build(); + + p.cargo("test --no-run -v") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + assert!(!p.bin("foo").is_file()); + assert!(p.bin("examples/foo").is_file()); + + p.process(&p.bin("examples/foo")) + .with_stdout("example\n") + .run(); + + p.cargo("run") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] [..]", + ) + .with_stdout("bin") + .run(); + assert!(p.bin("foo").is_file()); +} + +#[cargo_test] +fn test_with_example_twice() { + let p = project() + .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) + .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) + .build(); + + println!("first"); + p.cargo("test -v").run(); + assert!(p.bin("examples/foo").is_file()); + println!("second"); + p.cargo("test -v").run(); + assert!(p.bin("examples/foo").is_file()); +} + +#[cargo_test] +fn example_with_dev_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + test = false + doctest = false + + [dev-dependencies.a] + path = "a" + "#, + ) + .file("src/lib.rs", "") + .file( + "examples/ex.rs", + "#[allow(unused_extern_crates)] extern crate a; fn main() {}", + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("test -v") + .with_stderr( + "\ +[..] +[..] +[..] +[..] +[RUNNING] `rustc --crate-name ex [..] --extern a=[..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn bin_is_preserved() { + let p = project() + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .build(); + + p.cargo("build -v").run(); + assert!(p.bin("foo").is_file()); + + println!("testing"); + p.cargo("test -v").run(); + assert!(p.bin("foo").is_file()); +} + +#[cargo_test] +fn bad_example() { + let p = project().file("src/lib.rs", ""); + let p = p.build(); + + p.cargo("run --example foo") + .with_status(101) + .with_stderr("[ERROR] no example target named `foo`") + .run(); + p.cargo("run --bin foo") + .with_status(101) + .with_stderr("[ERROR] no bin target named `foo`") + .run(); +} + +#[cargo_test] +fn doctest_feature() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + [features] + bar = [] + "#, + ) + .file( + "src/lib.rs", + r#" + /// ```rust + /// assert_eq!(foo::foo(), 1); + /// ``` + #[cfg(feature = "bar")] + pub fn foo() -> i32 { 1 } + "#, + ) + .build(); + + p.cargo("test --features bar") + .with_stderr( + "\ +[COMPILING] foo [..] +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("running 0 tests") + .with_stdout_contains("test [..] ... ok") + .run(); +} + +#[cargo_test] +fn dashes_to_underscores() { + let p = project() + .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1")) + .file( + "src/lib.rs", + r#" + /// ``` + /// assert_eq!(foo_bar::foo(), 1); + /// ``` + pub fn foo() -> i32 { 1 } + "#, + ) + .build(); + + p.cargo("test -v").run(); +} + +#[cargo_test] +fn doctest_dev_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// extern crate b; + /// ``` + pub fn foo() {} + "#, + ) + .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("test -v").run(); +} + +#[cargo_test] +fn filter_no_doc_tests() { + let p = project() + .file( + "src/lib.rs", + r#" + /// ``` + /// extern crate b; + /// ``` + pub fn foo() {} + "#, + ) + .file("tests/foo.rs", "") + .build(); + + p.cargo("test --test=foo") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target/debug/deps/foo[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .run(); +} + +#[cargo_test] +fn dylib_doctest() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["rlib", "dylib"] + test = false + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// foo::foo(); + /// ``` + pub fn foo() {} + "#, + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[DOCTEST] foo", + ) + .with_stdout_contains("test [..] ... ok") + .run(); +} + +#[cargo_test] +fn dylib_doctest2() { + // Can't doc-test dylibs, as they're statically linked together. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + name = "foo" + crate-type = ["dylib"] + test = false + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// foo::foo(); + /// ``` + pub fn foo() {} + "#, + ) + .build(); + + p.cargo("test").with_stdout("").run(); +} + +#[cargo_test] +fn cyclic_dev_dep_doc_test() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + bar = { path = "bar" } + "#, + ) + .file( + "src/lib.rs", + r#" + //! ``` + //! extern crate bar; + //! ``` + "#, + ) + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = { path = ".." } + "#, + ) + .file( + "bar/src/lib.rs", + r#" + #[allow(unused_extern_crates)] + extern crate foo; + "#, + ) + .build(); + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[COMPILING] bar v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("running 0 tests") + .with_stdout_contains("test [..] ... ok") + .run(); +} + +#[cargo_test] +fn dev_dep_with_build_script() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dev-dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file("examples/foo.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + build = "build.rs" + "#, + ) + .file("bar/src/lib.rs", "") + .file("bar/build.rs", "fn main() {}") + .build(); + p.cargo("test").run(); +} + +#[cargo_test] +fn no_fail_fast() { + let p = project() + .file( + "src/lib.rs", + r#" + pub fn add_one(x: i32) -> i32{ + x + 1 + } + + /// ```rust + /// use foo::sub_one; + /// assert_eq!(sub_one(101), 100); + /// ``` + pub fn sub_one(x: i32) -> i32{ + x - 1 + } + "#, + ) + .file( + "tests/test_add_one.rs", + r#" + extern crate foo; + use foo::*; + + #[test] + fn add_one_test() { + assert_eq!(add_one(1), 2); + } + + #[test] + fn fail_add_one_test() { + assert_eq!(add_one(1), 1); + } + "#, + ) + .file( + "tests/test_sub_one.rs", + r#" + extern crate foo; + use foo::*; + + #[test] + fn sub_one_test() { + assert_eq!(sub_one(1), 0); + } + "#, + ) + .build(); + p.cargo("test --no-fail-fast") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] target/debug/deps/foo-[..][EXE] +[RUNNING] target/debug/deps/test_add_one-[..][EXE]", + ) + .with_stdout_contains("running 0 tests") + .with_stderr_contains( + "\ +[RUNNING] target/debug/deps/test_sub_one-[..][EXE] +[DOCTEST] foo", + ) + .with_stdout_contains("test result: FAILED. [..]") + .with_stdout_contains("test sub_one_test ... ok") + .with_stdout_contains_n("test [..] ... ok", 3) + .run(); +} + +#[cargo_test] +fn test_multiple_packages() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies.d1] + path = "d1" + [dependencies.d2] + path = "d2" + + [lib] + name = "foo" + doctest = false + "#, + ) + .file("src/lib.rs", "") + .file( + "d1/Cargo.toml", + r#" + [package] + name = "d1" + version = "0.0.1" + authors = [] + + [lib] + name = "d1" + doctest = false + "#, + ) + .file("d1/src/lib.rs", "") + .file( + "d2/Cargo.toml", + r#" + [package] + name = "d2" + version = "0.0.1" + authors = [] + + [lib] + name = "d2" + doctest = false + "#, + ) + .file("d2/src/lib.rs", ""); + let p = p.build(); + + p.cargo("test -p d1 -p d2") + .with_stderr_contains("[RUNNING] target/debug/deps/d1-[..][EXE]") + .with_stderr_contains("[RUNNING] target/debug/deps/d2-[..][EXE]") + .with_stdout_contains_n("running 0 tests", 2) + .run(); +} + +#[cargo_test] +fn bin_does_not_rebuild_tests() { + let p = project() + .file("src/lib.rs", "") + .file("src/main.rs", "fn main() {}") + .file("tests/foo.rs", ""); + let p = p.build(); + + p.cargo("test -v").run(); + + sleep_ms(1000); + File::create(&p.root().join("src/main.rs")) + .unwrap() + .write_all(b"fn main() { 3; }") + .unwrap(); + + p.cargo("test -v --no-run") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..] src/main.rs [..]` +[RUNNING] `rustc [..] src/main.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn selective_test_wonky_profile() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.release] + opt-level = 2 + + [dependencies] + a = { path = "a" } + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", ""); + let p = p.build(); + + p.cargo("test -v --no-run --release -p foo -p a").run(); +} + +#[cargo_test] +fn selective_test_optional_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a", optional = true } + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", ""); + let p = p.build(); + + p.cargo("test -v --no-run --features a -p a") + .with_stderr( + "\ +[COMPILING] a v0.0.1 ([..]) +[RUNNING] `rustc [..] a/src/lib.rs [..]` +[RUNNING] `rustc [..] a/src/lib.rs [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn only_test_docs() { + let p = project() + .file( + "src/lib.rs", + r#" + #[test] + fn foo() { + let a: u32 = "hello"; + } + + /// ``` + /// foo::bar(); + /// println!("ok"); + /// ``` + pub fn bar() { + } + "#, + ) + .file("tests/foo.rs", "this is not rust"); + let p = p.build(); + + p.cargo("test --doc") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[DOCTEST] foo", + ) + .with_stdout_contains("test [..] ... 
ok") + .run(); +} + +#[cargo_test] +fn test_panic_abort_with_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [profile.dev] + panic = 'abort' + "#, + ) + .file( + "src/lib.rs", + r#" + extern crate bar; + + #[test] + fn foo() {} + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) + .file("bar/src/lib.rs", "") + .build(); + p.cargo("test -v").run(); +} + +#[cargo_test] +fn cfg_test_even_with_no_harness() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [lib] + harness = false + doctest = false + "#, + ) + .file( + "src/lib.rs", + r#"#[cfg(test)] fn main() { println!("hello!"); }"#, + ) + .build(); + p.cargo("test -v") + .with_stdout("hello!\n") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([..]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `[..]` +", + ) + .run(); +} + +#[cargo_test] +fn panic_abort_multiple() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [profile.release] + panic = 'abort' + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate a;", + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + p.cargo("test --release -v -p foo -p a").run(); +} + +#[cargo_test] +fn pass_correct_cfgs_flags_to_rustdoc() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + + [features] + default = ["feature_a/default"] + nightly = ["feature_a/nightly"] + + [dependencies.feature_a] + path = "libs/feature_a" + default-features = false + "#, + ) + .file( + "src/lib.rs", + r#" + #[cfg(test)] + mod tests { + #[test] + fn it_works() { + assert!(true); + } + } + "#, + ) + .file( + "libs/feature_a/Cargo.toml", + r#" + [package] + name = "feature_a" + version = "0.1.0" + authors = [] + + [features] + default = ["mock_serde_codegen"] + nightly = ["mock_serde_derive"] + + [dependencies] + mock_serde_derive = { path = "../mock_serde_derive", optional = true } + + [build-dependencies] + mock_serde_codegen = { path = "../mock_serde_codegen", optional = true } + "#, + ) + .file( + "libs/feature_a/src/lib.rs", + r#" + #[cfg(feature = "mock_serde_derive")] + const MSG: &'static str = "This is safe"; + + #[cfg(feature = "mock_serde_codegen")] + const MSG: &'static str = "This is risky"; + + pub fn get() -> &'static str { + MSG + } + "#, + ) + .file( + "libs/mock_serde_derive/Cargo.toml", + &basic_manifest("mock_serde_derive", "0.1.0"), + ) + .file("libs/mock_serde_derive/src/lib.rs", "") + .file( + "libs/mock_serde_codegen/Cargo.toml", + &basic_manifest("mock_serde_codegen", "0.1.0"), + ) + .file("libs/mock_serde_codegen/src/lib.rs", ""); + let p = p.build(); + + p.cargo("test --package feature_a --verbose") + .with_stderr_contains( + "\ +[DOCTEST] feature_a +[RUNNING] `rustdoc --test [..]mock_serde_codegen[..]`", + ) + .run(); + + p.cargo("test --verbose") + .with_stderr_contains( + "\ +[DOCTEST] foo +[RUNNING] `rustdoc --test [..]feature_a[..]`", + ) + .run(); +} + +#[cargo_test] +fn test_release_ignore_panic() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [profile.test] + panic = 'abort' + [profile.release] 
+ panic = 'abort' + "#, + ) + .file( + "src/lib.rs", + "#[allow(unused_extern_crates)] extern crate a;", + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", ""); + let p = p.build(); + println!("test"); + p.cargo("test -v").run(); + println!("bench"); + p.cargo("bench -v").run(); +} + +#[cargo_test] +fn test_many_with_features() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [features] + foo = [] + + [workspace] + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) + .file("a/src/lib.rs", "") + .build(); + + p.cargo("test -v -p a -p foo --features foo").run(); +} + +#[cargo_test] +fn test_all_workspace() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "#[test] fn foo_test() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "#[test] fn bar_test() {}") + .build(); + + p.cargo("test --all") + .with_stdout_contains("test foo_test ... ok") + .with_stdout_contains("test bar_test ... ok") + .run(); +} + +#[cargo_test] +fn test_all_exclude() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["bar", "baz"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/lib.rs", "#[test] pub fn bar() {}") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }") + .build(); + + p.cargo("test --all --exclude baz") + .with_stdout_contains( + "running 1 test +test bar ... ok", + ) + .run(); +} + +#[cargo_test] +fn test_all_virtual_manifest() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/lib.rs", "#[test] fn a() {}") + .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) + .file("b/src/lib.rs", "#[test] fn b() {}") + .build(); + + p.cargo("test --all") + .with_stdout_contains("test a ... ok") + .with_stdout_contains("test b ... ok") + .run(); +} + +#[cargo_test] +fn test_virtual_manifest_all_implied() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/lib.rs", "#[test] fn a() {}") + .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) + .file("b/src/lib.rs", "#[test] fn b() {}") + .build(); + + p.cargo("test") + .with_stdout_contains("test a ... ok") + .with_stdout_contains("test b ... ok") + .run(); +} + +#[cargo_test] +fn test_all_member_dependency_same_name() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + a = "0.1.0" + "#, + ) + .file("a/src/lib.rs", "#[test] fn a() {}") + .build(); + + Package::new("a", "0.1.0").publish(); + + p.cargo("test --all") + .with_stdout_contains("test a ... 
ok") + .run(); +} + +#[cargo_test] +fn doctest_only_with_dev_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dev-dependencies] + b = { path = "b" } + "#, + ) + .file( + "src/lib.rs", + r#" + /// ``` + /// extern crate b; + /// + /// b::b(); + /// ``` + pub fn a() {} + "#, + ) + .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) + .file("b/src/lib.rs", "pub fn b() {}") + .build(); + + p.cargo("test --doc -v").run(); +} + +#[cargo_test] +fn test_many_targets() { + let p = project() + .file( + "src/bin/a.rs", + r#" + fn main() {} + #[test] fn bin_a() {} + "#, + ) + .file( + "src/bin/b.rs", + r#" + fn main() {} + #[test] fn bin_b() {} + "#, + ) + .file( + "src/bin/c.rs", + r#" + fn main() {} + #[test] fn bin_c() { panic!(); } + "#, + ) + .file( + "examples/a.rs", + r#" + fn main() {} + #[test] fn example_a() {} + "#, + ) + .file( + "examples/b.rs", + r#" + fn main() {} + #[test] fn example_b() {} + "#, + ) + .file("examples/c.rs", "#[test] fn example_c() { panic!(); }") + .file("tests/a.rs", "#[test] fn test_a() {}") + .file("tests/b.rs", "#[test] fn test_b() {}") + .file("tests/c.rs", "does not compile") + .build(); + + p.cargo("test --verbose --bin a --bin b --example a --example b --test a --test b") + .with_stdout_contains("test bin_a ... ok") + .with_stdout_contains("test bin_b ... ok") + .with_stdout_contains("test test_a ... ok") + .with_stdout_contains("test test_b ... ok") + .with_stderr_contains("[RUNNING] `rustc --crate-name a examples/a.rs [..]`") + .with_stderr_contains("[RUNNING] `rustc --crate-name b examples/b.rs [..]`") + .run(); +} + +#[cargo_test] +fn doctest_and_registry() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "a" + version = "0.1.0" + + [dependencies] + b = { path = "b" } + c = { path = "c" } + + [workspace] + "#, + ) + .file("src/lib.rs", "") + .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) + .file( + "b/src/lib.rs", + " + /// ``` + /// b::foo(); + /// ``` + pub fn foo() {} + ", + ) + .file( + "c/Cargo.toml", + r#" + [project] + name = "c" + version = "0.1.0" + + [dependencies] + b = "0.1" + "#, + ) + .file("c/src/lib.rs", "") + .build(); + + Package::new("b", "0.1.0").publish(); + + p.cargo("test --all -v").run(); +} + +#[cargo_test] +fn cargo_test_env() { + let src = format!( + r#" + #![crate_type = "rlib"] + + #[test] + fn env_test() {{ + use std::env; + eprintln!("{{}}", env::var("{}").unwrap()); + }} + "#, + cargo::CARGO_ENV + ); + + let p = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", &src) + .build(); + + let cargo = cargo_exe().canonicalize().unwrap(); + p.cargo("test --lib -- --nocapture") + .with_stderr_contains(cargo.to_str().unwrap()) + .with_stdout_contains("test env_test ... ok") + .run(); +} + +#[cargo_test] +fn test_order() { + let p = project() + .file("src/lib.rs", "#[test] fn test_lib() {}") + .file("tests/a.rs", "#[test] fn test_a() {}") + .file("tests/z.rs", "#[test] fn test_z() {}") + .build(); + + p.cargo("test --all") + .with_stdout_contains( + " +running 1 test +test test_lib ... ok + +test result: ok. [..] + + +running 1 test +test test_a ... ok + +test result: ok. [..] + + +running 1 test +test test_z ... ok + +test result: ok. [..] +", + ) + .run(); +} + +#[cargo_test] +fn cyclic_dev() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dev-dependencies] + foo = { path = "." 
} + "#, + ) + .file("src/lib.rs", "#[test] fn test_lib() {}") + .file("tests/foo.rs", "extern crate foo;") + .build(); + + p.cargo("test --all").run(); +} + +#[cargo_test] +fn publish_a_crate_without_tests() { + Package::new("testless", "0.1.0") + .file( + "Cargo.toml", + r#" + [project] + name = "testless" + version = "0.1.0" + exclude = ["tests/*"] + + [[test]] + name = "a_test" + "#, + ) + .file("src/lib.rs", "") + // In real life, the package will have a test, + // which would be excluded from .crate file by the + // `exclude` field. Our test harness does not honor + // exclude though, so let's just not add the file! + // .file("tests/a_test.rs", "") + .publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + + [dependencies] + testless = "0.1.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("test").run(); + p.cargo("test --package testless").run(); +} + +#[cargo_test] +fn find_dependency_of_proc_macro_dependency_with_target() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["root", "proc_macro_dep"] + "#, + ) + .file( + "root/Cargo.toml", + r#" + [project] + name = "root" + version = "0.1.0" + authors = [] + + [dependencies] + proc_macro_dep = { path = "../proc_macro_dep" } + "#, + ) + .file( + "root/src/lib.rs", + r#" + #[macro_use] + extern crate proc_macro_dep; + + #[derive(Noop)] + pub struct X; + "#, + ) + .file( + "proc_macro_dep/Cargo.toml", + r#" + [project] + name = "proc_macro_dep" + version = "0.1.0" + authors = [] + + [lib] + proc-macro = true + + [dependencies] + baz = "^0.1" + "#, + ) + .file( + "proc_macro_dep/src/lib.rs", + r#" + extern crate baz; + extern crate proc_macro; + use proc_macro::TokenStream; + + #[proc_macro_derive(Noop)] + pub fn noop(_input: TokenStream) -> TokenStream { + "".parse().unwrap() + } + "#, + ) + .build(); + Package::new("bar", "0.1.0").publish(); + Package::new("baz", "0.1.0") + .dep("bar", "0.1") + .file("src/lib.rs", "extern crate bar;") + .publish(); + p.cargo("test --all --target").arg(rustc_host()).run(); +} + +#[cargo_test] +fn test_hint_not_masked_by_doctest() { + let p = project() + .file( + "src/lib.rs", + r#" + /// ``` + /// assert_eq!(1, 1); + /// ``` + pub fn this_works() {} + "#, + ) + .file( + "tests/integ.rs", + r#" + #[test] + fn this_fails() { + panic!(); + } + "#, + ) + .build(); + p.cargo("test --no-fail-fast") + .with_status(101) + .with_stdout_contains("test this_fails ... 
FAILED") + .with_stdout_contains("[..]this_works (line [..]ok") + .with_stderr_contains( + "[ERROR] test failed, to rerun pass \ + '--test integ'", + ) + .run(); +} + +#[cargo_test] +fn test_hint_workspace_virtual() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/lib.rs", "#[test] fn t1() {}") + .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) + .file("b/src/lib.rs", "#[test] fn t1() {assert!(false)}") + .build(); + + p.cargo("test") + .with_stderr_contains("[ERROR] test failed, to rerun pass '-p b --lib'") + .with_status(101) + .run(); + p.cargo("test") + .cwd("b") + .with_stderr_contains("[ERROR] test failed, to rerun pass '--lib'") + .with_status(101) + .run(); +} + +#[cargo_test] +fn test_hint_workspace_nonvirtual() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [workspace] + members = ["a"] + "#, + ) + .file("src/lib.rs", "") + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/lib.rs", "#[test] fn t1() {assert!(false)}") + .build(); + + p.cargo("test --all") + .with_stderr_contains("[ERROR] test failed, to rerun pass '-p a --lib'") + .with_status(101) + .run(); + p.cargo("test -p a") + .with_stderr_contains("[ERROR] test failed, to rerun pass '-p a --lib'") + .with_status(101) + .run(); +} + +#[cargo_test] +fn json_artifact_includes_test_flag() { + // Verify that the JSON artifact output includes `test` flag. + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [profile.test] + opt-level = 1 + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("test --lib -v --message-format=json") + .with_json( + r#" + { + "reason":"compiler-artifact", + "profile": { + "debug_assertions": true, + "debuginfo": 2, + "opt_level": "1", + "overflow_checks": true, + "test": true + }, + "executable": "[..]/foo-[..]", + "features": [], + "package_id":"foo 0.0.1 ([..])", + "target":{ + "kind":["lib"], + "crate_types":["lib"], + "doctest": true, + "edition": "2015", + "name":"foo", + "src_path":"[..]lib.rs" + }, + "filenames":["[..]/foo-[..]"], + "fresh": false + } +"#, + ) + .run(); +} + +#[cargo_test] +fn json_artifact_includes_executable_for_library_tests() { + let p = project() + .file("src/main.rs", "fn main() { }") + .file("src/lib.rs", r#"#[test] fn lib_test() {}"#) + .build(); + + p.cargo("test --lib -v --no-run --message-format=json") + .with_json( + r#" + { + "executable": "[..]/foo/target/debug/foo-[..][EXE]", + "features": [], + "filenames": "{...}", + "fresh": false, + "package_id": "foo 0.0.1 ([..])", + "profile": "{...}", + "reason": "compiler-artifact", + "target": { + "crate_types": [ "lib" ], + "kind": [ "lib" ], + "doctest": true, + "edition": "2015", + "name": "foo", + "src_path": "[..]/foo/src/lib.rs" + } + } + "#, + ) + .run(); +} + +#[cargo_test] +fn json_artifact_includes_executable_for_integration_tests() { + let p = project() + .file( + "tests/integration_test.rs", + r#"#[test] fn integration_test() {}"#, + ) + .build(); + + p.cargo("test -v --no-run --message-format=json --test integration_test") + .with_json( + r#" + { + "executable": "[..]/foo/target/debug/integration_test-[..][EXE]", + "features": [], + "filenames": "{...}", + "fresh": false, + "package_id": "foo 0.0.1 ([..])", + "profile": "{...}", + "reason": "compiler-artifact", + "target": { + "crate_types": [ "bin" ], + "kind": [ "test" ], + "doctest": false, 
+ "edition": "2015", + "name": "integration_test", + "src_path": "[..]/foo/tests/integration_test.rs" + } + } + "#, + ) + .run(); +} + +#[cargo_test] +fn test_build_script_links() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + links = 'something' + + [lib] + test = false + "#, + ) + .file("build.rs", "fn main() {}") + .file("src/lib.rs", "") + .build(); + + p.cargo("test --no-run").run(); +} + +#[cargo_test] +fn doctest_skip_staticlib() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [lib] + crate-type = ["staticlib"] + "#, + ) + .file( + "src/lib.rs", + r#" + //! ``` + //! assert_eq!(1,2); + //! ``` + "#, + ) + .build(); + + p.cargo("test --doc") + .with_status(101) + .with_stderr( + "\ +[WARNING] doc tests are not supported for crate type(s) `staticlib` in package `foo` +[ERROR] no library targets found in package `foo`", + ) + .run(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo [..] +[FINISHED] dev [..] +[RUNNING] target/debug/deps/foo-[..]", + ) + .run(); +} + +#[cargo_test] +fn can_not_mix_doc_tests_and_regular_tests() { + let p = project() + .file( + "src/lib.rs", + "\ +/// ``` +/// assert_eq!(1, 1) +/// ``` +pub fn foo() -> u8 { 1 } + +#[cfg(test)] mod tests { + #[test] fn it_works() { assert_eq!(2 + 2, 4); } +} +", + ) + .build(); + + p.cargo("test") + .with_stderr( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..] +[DOCTEST] foo +", + ) + .with_stdout( + " +running 1 test +test tests::it_works ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + + +running 1 test +test src/lib.rs - foo (line 1) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out +\n", + ) + .run(); + + p.cargo("test --lib") + .with_stderr( + "\ +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] target/debug/deps/foo-[..]\n", + ) + .with_stdout( + " +running 1 test +test tests::it_works ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out +\n", + ) + .run(); + + // This has been modified to attempt to diagnose spurious errors on CI. + // For some reason, this is recompiling the lib when it shouldn't. If the + // root cause is ever found, the changes here should be reverted. + // See https://github.com/rust-lang/cargo/issues/6887 + p.cargo("test --doc -vv") + .with_stderr_does_not_contain("[COMPILING] foo [..]") + .with_stderr_contains("[DOCTEST] foo") + .with_stdout( + " +running 1 test +test src/lib.rs - foo (line 1) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + +", + ) + .env("CARGO_LOG", "cargo=trace") + .run(); + + p.cargo("test --lib --doc") + .with_status(101) + .with_stderr("[ERROR] Can't mix --doc with other target selecting options\n") + .run(); +} + +#[cargo_test] +fn can_not_no_run_doc_tests() { + let p = project() + .file( + "src/lib.rs", + r#" +/// ``` +/// let _x = 1 + "foo"; +/// ``` +pub fn foo() -> u8 { 1 } +"#, + ) + .build(); + + p.cargo("test --doc --no-run") + .with_status(101) + .with_stderr("[ERROR] Can't skip running doc tests with --no-run") + .run(); +} + +#[cargo_test] +fn test_all_targets_lib() { + let p = project().file("src/lib.rs", "").build(); + + p.cargo("test --all-targets") + .with_stderr( + "\ +[COMPILING] foo [..] +[FINISHED] dev [..] +[RUNNING] [..]foo[..] 
+", + ) + .run(); +} + +#[cargo_test] +fn test_dep_with_dev() { + Package::new("devdep", "0.1.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + + [dependencies] + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [dev-dependencies] + devdep = "0.1" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + p.cargo("test -p bar") + .with_status(101) + .with_stderr( + "[ERROR] package `bar` cannot be tested because it requires dev-dependencies \ + and is not a member of the workspace", + ) + .run(); +} diff --git a/tests/testsuite/tool_paths.rs b/tests/testsuite/tool_paths.rs new file mode 100644 index 00000000000..07dfa082999 --- /dev/null +++ b/tests/testsuite/tool_paths.rs @@ -0,0 +1,263 @@ +use crate::support::rustc_host; +use crate::support::{basic_lib_manifest, project}; + +#[cargo_test] +fn pathless_tools() { + let target = rustc_host(); + + let foo = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + ar = "nonexistent-ar" + linker = "nonexistent-linker" + "#, + target + ), + ) + .build(); + + foo.cargo("build --verbose") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn absolute_tools() { + let target = rustc_host(); + + // Escaped as they appear within a TOML config file + let config = if cfg!(windows) { + ( + r#"C:\\bogus\\nonexistent-ar"#, + r#"C:\\bogus\\nonexistent-linker"#, + ) + } else { + (r#"/bogus/nonexistent-ar"#, r#"/bogus/nonexistent-linker"#) + }; + + let foo = project() + .file("Cargo.toml", &basic_lib_manifest("foo")) + .file("src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{target}] + ar = "{ar}" + linker = "{linker}" + "#, + target = target, + ar = config.0, + linker = config.1 + ), + ) + .build(); + + foo.cargo("build --verbose") + .with_stderr( + "\ +[COMPILING] foo v0.5.0 ([CWD]) +[RUNNING] `rustc [..] -C ar=[..]bogus/nonexistent-ar -C linker=[..]bogus/nonexistent-linker [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn relative_tools() { + let target = rustc_host(); + + // Escaped as they appear within a TOML config file + let config = if cfg!(windows) { + (r#".\\nonexistent-ar"#, r#".\\tools\\nonexistent-linker"#) + } else { + (r#"./nonexistent-ar"#, r#"./tools/nonexistent-linker"#) + }; + + // Funky directory structure to test that relative tool paths are made absolute + // by reference to the `.cargo/..` directory and not to (for example) the CWD. + let p = project() + .no_manifest() + .file("bar/Cargo.toml", &basic_lib_manifest("bar")) + .file("bar/src/lib.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{target}] + ar = "{ar}" + linker = "{linker}" + "#, + target = target, + ar = config.0, + linker = config.1 + ), + ) + .build(); + + let prefix = p.root().into_os_string().into_string().unwrap(); + + p.cargo("build --verbose").cwd("bar").with_stderr(&format!( + "\ +[COMPILING] bar v0.5.0 ([CWD]) +[RUNNING] `rustc [..] -C ar={prefix}/./nonexistent-ar -C linker={prefix}/./tools/nonexistent-linker [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + prefix = prefix, + )).run(); +} + +#[cargo_test] +fn custom_runner() { + let target = rustc_host(); + + let p = project() + .file("src/main.rs", "fn main() {}") + .file("tests/test.rs", "") + .file("benches/bench.rs", "") + .file( + ".cargo/config", + &format!( + r#" + [target.{}] + runner = "nonexistent-runner -r" + "#, + target + ), + ) + .build(); + + p.cargo("run -- --param") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` +", + ) + .run(); + + p.cargo("test --test test --verbose -- --param") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc [..]` +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `nonexistent-runner -r [..]/target/debug/deps/test-[..][EXE] --param` +", + ) + .run(); + + p.cargo("bench --bench bench --verbose -- --param") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[RUNNING] `rustc [..]` +[RUNNING] `rustc [..]` +[FINISHED] release [optimized] target(s) in [..] +[RUNNING] `nonexistent-runner -r [..]/target/release/deps/bench-[..][EXE] --param --bench` +", + ) + .run(); +} + +// can set a custom runner via `target.'cfg(..)'.runner` +#[cargo_test] +fn custom_runner_cfg() { + let p = project() + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + r#" + [target.'cfg(not(target_os = "none"))'] + runner = "nonexistent-runner -r" + "#, + ) + .build(); + + p.cargo("run -- --param") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` +", + ) + .run(); +} + +// custom runner set via `target.$triple.runner` have precende over `target.'cfg(..)'.runner` +#[cargo_test] +fn custom_runner_cfg_precedence() { + let target = rustc_host(); + + let p = project() + .file("src/main.rs", "fn main() {}") + .file( + ".cargo/config", + &format!( + r#" + [target.'cfg(not(target_os = "none"))'] + runner = "ignored-runner" + + [target.{}] + runner = "nonexistent-runner -r" + "#, + target + ), + ) + .build(); + + p.cargo("run -- --param") + .with_status(101) + .with_stderr_contains( + "\ +[COMPILING] foo v0.0.1 ([CWD]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param`
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn custom_runner_cfg_collision() {
+    let p = project()
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            ".cargo/config",
+            r#"
+            [target.'cfg(not(target_arch = "avr"))']
+            runner = "true"
+
+            [target.'cfg(not(target_os = "none"))']
+            runner = "false"
+            "#,
+        )
+        .build();
+
+    p.cargo("run -- --param")
+        .with_status(101)
+        .with_stderr_contains(
+            "\
+[ERROR] several matching instances of `target.'cfg(..)'.runner` in `.cargo/config`
+",
+        )
+        .run();
+}
diff --git a/tests/testsuite/update.rs b/tests/testsuite/update.rs
new file mode 100644
index 00000000000..468458563b0
--- /dev/null
+++ b/tests/testsuite/update.rs
@@ -0,0 +1,639 @@
+use std::fs::File;
+use std::io::prelude::*;
+
+use crate::support::registry::Package;
+use crate::support::{basic_manifest, project};
+
+#[cargo_test]
+fn minor_update_two_places() {
+    Package::new("log", "0.1.0").publish();
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            log = "0.1"
+            foo = { path = "foo" }
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            log = "0.1"
+        "#,
+        )
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    Package::new("log", "0.1.1").publish();
+
+    File::create(p.root().join("foo/Cargo.toml"))
+        .unwrap()
+        .write_all(
+            br#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            log = "0.1.1"
+        "#,
+        )
+        .unwrap();
+
+    p.cargo("build").run();
+}
+
+#[cargo_test]
+fn transitive_minor_update() {
+    Package::new("log", "0.1.0").publish();
+    Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            serde = "0.1"
+            log = "0.1"
+            foo = { path = "foo" }
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            serde = "0.1"
+        "#,
+        )
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+
+    Package::new("log", "0.1.1").publish();
+    Package::new("serde", "0.1.1").dep("log", "0.1.1").publish();
+
+    // Note that `serde` isn't actually updated here! The default behavior of
+    // `update` right now is to satisfy an update as conservatively as
+    // possible. In this case we previously locked the dependency graph to
+    // `log 0.1.0`, but nothing on the command line says we're allowed to
+    // update that. As a result the update of `serde` here shouldn't update to
+    // `serde 0.1.1`, as that would also force an update to `log 0.1.1`.
+    //
+    // Also note that this is probably counterintuitive and weird. We may wish
+    // to change this one day.
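+    //
+    // Editorial sketch (not part of the original change): with the packages
+    // published above, the resolver's conservative outcome reads like this in
+    // `Cargo.lock` terms:
+    //
+    //     log   0.1.0   # locked; not named on the command line, so kept
+    //     serde 0.1.0   # candidate 0.1.1 rejected: it requires log 0.1.1
+    //
+    // so the only visible effect of `update -p serde` here is the index
+    // refresh asserted below.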
+ p.cargo("update -p serde") + .with_stderr( + "\ +[UPDATING] `[..]` index +", + ) + .run(); +} + +#[cargo_test] +fn conservative() { + Package::new("log", "0.1.0").publish(); + Package::new("serde", "0.1.0").dep("log", "0.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + log = "0.1" + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + Package::new("log", "0.1.1").publish(); + Package::new("serde", "0.1.1").dep("log", "0.1").publish(); + + p.cargo("update -p serde") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] serde v0.1.0 -> v0.1.1 +", + ) + .run(); +} + +#[cargo_test] +fn update_via_new_dep() { + Package::new("log", "0.1.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + # foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + Package::new("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + p.cargo("build").env("CARGO_LOG", "cargo=trace").run(); +} + +#[cargo_test] +fn update_via_new_member() { + Package::new("log", "0.1.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [workspace] + # members = [ "foo" ] + + [dependencies] + log = "0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + Package::new("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + p.cargo("build").run(); +} + +#[cargo_test] +fn add_dep_deep_new_requirement() { + Package::new("log", "0.1.0").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + # bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + Package::new("log", "0.1.1").publish(); + Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + p.cargo("build").run(); +} + +#[cargo_test] +fn everything_real_deep() { + Package::new("log", "0.1.0").publish(); + Package::new("foo", "0.1.0").dep("log", "0.1").publish(); + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + # bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + Package::new("log", "0.1.1").publish(); + Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + p.cargo("build").run(); +} + +#[cargo_test] +fn change_package_version() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "a-foo" + version = "0.2.0-alpha" + authors = [] + + [dependencies] + bar = { path = "bar", version = "0.2.0-alpha" } + "#, + ) + .file("src/lib.rs", "") + .file("bar/Cargo.toml", 
&basic_manifest("bar", "0.2.0-alpha")) + .file("bar/src/lib.rs", "") + .file( + "Cargo.lock", + r#" + [[package]] + name = "foo" + version = "0.2.0" + dependencies = ["bar 0.2.0"] + + [[package]] + name = "bar" + version = "0.2.0" + "#, + ) + .build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn update_precise() { + Package::new("log", "0.1.0").publish(); + Package::new("serde", "0.1.0").publish(); + Package::new("serde", "0.2.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.2" + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + p.cargo("build").run(); + + Package::new("serde", "0.2.0").publish(); + + p.cargo("update -p serde:0.2.1 --precise 0.2.0") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] serde v0.2.1 -> v0.2.0 +", + ) + .run(); +} + +// cargo update should respect its arguments even without a lockfile. +// See issue "Running cargo update without a Cargo.lock ignores arguments" +// at . +#[cargo_test] +fn update_precise_first_run() { + Package::new("serde", "0.1.0").publish(); + Package::new("serde", "0.2.0").publish(); + Package::new("serde", "0.2.1").publish(); + + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + + [dependencies] + serde = "0.2" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("update -p serde --precise 0.2.0") + .with_stderr( + "\ +[UPDATING] `[..]` index +[UPDATING] serde v0.2.1 -> v0.2.0 +", + ) + .run(); + + // Assert `cargo metadata` shows serde 0.2.0 + p.cargo("metadata") + .with_json( + r#"{ + "packages": [ + { + "authors": [], + "categories": [], + "dependencies": [], + "description": null, + "edition": "2015", + "features": {}, + "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/Cargo.toml", + "metadata": null, + "name": "serde", + "readme": null, + "repository": null, + "source": "registry+https://github.com/rust-lang/crates.io-index", + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "serde", + "src_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/src/lib.rs" + } + ], + "version": "0.2.0" + }, + { + "authors": [], + "categories": [], + "dependencies": [ + { + "features": [], + "kind": null, + "name": "serde", + "optional": false, + "registry": null, + "rename": null, + "req": "^0.2", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "target": null, + "uses_default_features": true + } + ], + "description": null, + "edition": "2015", + "features": {}, + "id": "bar 0.0.1 (path+file://[..]/foo)", + "keywords": [], + "license": null, + "license_file": null, + "links": null, + "manifest_path": "[..]/foo/Cargo.toml", + "metadata": null, + "name": "bar", + "readme": null, + "repository": null, + "source": null, + "targets": [ + { + "crate_types": [ + "lib" + ], + "doctest": true, + "edition": "2015", + "kind": [ + "lib" + ], + "name": "bar", + "src_path": "[..]/foo/src/lib.rs" + } + ], + "version": "0.0.1" + } + ], + "resolve": { + "nodes": [ + { + "dependencies": [ + "serde 0.2.0 
(registry+https://github.com/rust-lang/crates.io-index)"
+                ],
+                "deps": [
+                    {
+                        "name": "serde",
+                        "pkg": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+                    }
+                ],
+                "features": [],
+                "id": "bar 0.0.1 (path+file://[..]/foo)"
+            },
+            {
+                "dependencies": [],
+                "deps": [],
+                "features": [],
+                "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)"
+            }
+        ],
+        "root": "bar 0.0.1 (path+file://[..]/foo)"
+    },
+    "target_directory": "[..]/foo/target",
+    "version": 1,
+    "workspace_members": [
+        "bar 0.0.1 (path+file://[..]/foo)"
+    ],
+    "workspace_root": "[..]/foo"
+}"#,
+        )
+        .run();
+
+    p.cargo("update -p serde --precise 0.2.0")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+",
+        )
+        .run();
+}
+
+#[cargo_test]
+fn preserve_top_comment() {
+    let p = project().file("src/lib.rs", "").build();
+
+    p.cargo("update").run();
+
+    let lockfile = p.read_lockfile();
+    assert!(lockfile.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n"));
+
+    let mut lines = lockfile.lines().collect::<Vec<_>>();
+    lines.insert(2, "# some other comment");
+    let mut lockfile = lines.join("\n");
+    lockfile.push_str("\n"); // .lines/.join loses the last newline
+    println!("saving Cargo.lock contents:\n{}", lockfile);
+
+    p.change_file("Cargo.lock", &lockfile);
+
+    p.cargo("update").run();
+
+    let lockfile2 = p.read_lockfile();
+    println!("loaded Cargo.lock contents:\n{}", lockfile2);
+
+    assert_eq!(lockfile, lockfile2);
+}
+
+#[cargo_test]
+fn dry_run_update() {
+    Package::new("log", "0.1.0").publish();
+    Package::new("serde", "0.1.0").dep("log", "0.1").publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            serde = "0.1"
+            log = "0.1"
+            foo = { path = "foo" }
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .file(
+            "foo/Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            serde = "0.1"
+        "#,
+        )
+        .file("foo/src/lib.rs", "")
+        .build();
+
+    p.cargo("build").run();
+    let old_lockfile = p.read_file("Cargo.lock");
+
+    Package::new("log", "0.1.1").publish();
+    Package::new("serde", "0.1.1").dep("log", "0.1").publish();
+
+    p.cargo("update -p serde --dry-run")
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[UPDATING] serde v0.1.0 -> v0.1.1
+[WARNING] not updating lockfile due to dry run
+",
+        )
+        .run();
+    let new_lockfile = p.read_file("Cargo.lock");
+    assert_eq!(old_lockfile, new_lockfile);
+}
diff --git a/tests/testsuite/vendor.rs b/tests/testsuite/vendor.rs
new file mode 100644
index 00000000000..e62b0061376
--- /dev/null
+++ b/tests/testsuite/vendor.rs
@@ -0,0 +1,508 @@
+use crate::support::git;
+use crate::support::registry::Package;
+use crate::support::{basic_lib_manifest, project, Project};
+
+#[cargo_test]
+fn vendor_simple() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+
+            [dependencies]
+            log = "0.3.5"
+        "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    Package::new("log", "0.3.5").publish();
+
+    p.cargo("vendor --respect-source-config").run();
+    let lock = p.read_file("vendor/log/Cargo.toml");
+    assert!(lock.contains("version = \"0.3.5\""));
+
+    add_vendor_config(&p);
+    p.cargo("build").run();
+}
+
+fn add_vendor_config(p: &Project) {
+    p.change_file(
+        ".cargo/config",
+        r#"
+        [source.crates-io]
+        replace-with = 'vendor'
+
+        [source.vendor]
+        directory = 'vendor'
+    "#,
+    );
+}
+
+#[cargo_test]
+fn two_versions() {
+    let p = project()
+        .file(
"Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bitflags = "0.8.0" + bar = { path = "bar" } + "#, + ) + .file("src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + + [dependencies] + bitflags = "0.7.0" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + Package::new("bitflags", "0.7.0").publish(); + Package::new("bitflags", "0.8.0").publish(); + + p.cargo("vendor --respect-source-config").run(); + + let lock = p.read_file("vendor/bitflags/Cargo.toml"); + assert!(lock.contains("version = \"0.8.0\"")); + let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); + assert!(lock.contains("version = \"0.7.0\"")); + + add_vendor_config(&p); + p.cargo("build").run(); +} + +#[cargo_test] +fn help() { + let p = project().build(); + p.cargo("vendor -h").run(); +} + +#[cargo_test] +fn update_versions() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bitflags = "0.7.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + Package::new("bitflags", "0.7.0").publish(); + Package::new("bitflags", "0.8.0").publish(); + + p.cargo("vendor --respect-source-config").run(); + + let lock = p.read_file("vendor/bitflags/Cargo.toml"); + assert!(lock.contains("version = \"0.7.0\"")); + + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bitflags = "0.8.0" + "#, + ); + p.cargo("vendor --respect-source-config").run(); + + let lock = p.read_file("vendor/bitflags/Cargo.toml"); + assert!(lock.contains("version = \"0.8.0\"")); +} + +#[cargo_test] +fn two_lockfiles() { + let p = project() + .no_manifest() + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bitflags = "=0.7.0" + "#, + ) + .file("foo/src/lib.rs", "") + .file( + "bar/Cargo.toml", + r#" + [package] + name = "bar" + version = "0.1.0" + + [dependencies] + bitflags = "=0.8.0" + "#, + ) + .file("bar/src/lib.rs", "") + .build(); + + Package::new("bitflags", "0.7.0").publish(); + Package::new("bitflags", "0.8.0").publish(); + + p.cargo("vendor --respect-source-config -s bar/Cargo.toml --manifest-path foo/Cargo.toml") + .run(); + + let lock = p.read_file("vendor/bitflags/Cargo.toml"); + assert!(lock.contains("version = \"0.8.0\"")); + let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); + assert!(lock.contains("version = \"0.7.0\"")); + + add_vendor_config(&p); + p.cargo("build").cwd("foo").run(); + p.cargo("build").cwd("bar").run(); +} + +#[cargo_test] +fn delete_old_crates() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + bitflags = "=0.7.0" + "#, + ) + .file("src/lib.rs", "") + .build(); + + Package::new("bitflags", "0.7.0").publish(); + Package::new("log", "0.3.5").publish(); + + p.cargo("vendor --respect-source-config").run(); + p.read_file("vendor/bitflags/Cargo.toml"); + + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + log = "=0.3.5" + "#, + ); + + p.cargo("vendor --respect-source-config").run(); + let lock = p.read_file("vendor/log/Cargo.toml"); + assert!(lock.contains("version = \"0.3.5\"")); + assert!(!p.root().join("vendor/bitflags/Cargo.toml").exists()); +} + +#[cargo_test] +fn ignore_files() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + url = "1.4.1" + "#, + ) + 
.file("src/lib.rs", "") + .build(); + + Package::new("url", "1.4.1") + .file("src/lib.rs", "") + .file("foo.orig", "") + .file(".gitignore", "") + .file(".gitattributes", "") + .file("foo.rej", "") + .publish(); + + p.cargo("vendor --respect-source-config").run(); + let csum = p.read_file("vendor/url/.cargo-checksum.json"); + assert!(!csum.contains("foo.orig")); + assert!(!csum.contains(".gitignore")); + assert!(!csum.contains(".gitattributes")); + assert!(!csum.contains(".cargo-ok")); + assert!(!csum.contains("foo.rej")); +} + +#[cargo_test] +fn included_files_only() { + let git = git::new("a", |p| { + p.file("Cargo.toml", &basic_lib_manifest("a")) + .file("src/lib.rs", "") + .file(".gitignore", "a") + .file("a/b.md", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + a = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("vendor --respect-source-config").run(); + let csum = p.read_file("vendor/a/.cargo-checksum.json"); + assert!(!csum.contains("a/b.md")); +} + +#[cargo_test] +fn dependent_crates_in_crates() { + let git = git::new("a", |p| { + p.file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + + [dependencies] + b = { path = 'b' } + "#, + ) + .file("src/lib.rs", "") + .file("b/Cargo.toml", &basic_lib_manifest("b")) + .file("b/src/lib.rs", "") + }) + .unwrap(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + a = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("vendor --respect-source-config").run(); + p.read_file("vendor/a/.cargo-checksum.json"); + p.read_file("vendor/b/.cargo-checksum.json"); +} + +#[cargo_test] +fn vendoring_git_crates() { + let git = git::new("git", |p| { + p.file("Cargo.toml", &basic_lib_manifest("serde_derive")) + .file("src/lib.rs", "") + .file("src/wut.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies.serde] + version = "0.5.0" + + [dependencies.serde_derive] + version = "0.5.0" + + [patch.crates-io] + serde_derive = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + Package::new("serde", "0.5.0") + .dep("serde_derive", "0.5") + .publish(); + Package::new("serde_derive", "0.5.0").publish(); + + p.cargo("vendor --respect-source-config").run(); + p.read_file("vendor/serde_derive/src/wut.rs"); + + add_vendor_config(&p); + p.cargo("build").run(); +} + +#[cargo_test] +fn git_simple() { + let git = git::new("git", |p| { + p.file("Cargo.toml", &basic_lib_manifest("a")) + .file("src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + a = {{ git = '{}' }} + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("vendor --respect-source-config").run(); + let csum = p.read_file("vendor/a/.cargo-checksum.json"); + assert!(csum.contains("\"package\":null")); +} + +#[cargo_test] +fn git_duplicate() { + let git = git::new("a", |p| { + p.file( + "Cargo.toml", + r#" + [package] + name = "a" + version = "0.1.0" + + [dependencies] + b = { path = 'b' } + "#, + ) + .file("src/lib.rs", "") + .file("b/Cargo.toml", &basic_lib_manifest("b")) + .file("b/src/lib.rs", "") + }) + .unwrap(); + + let p = project() + .file( + 
"Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + a = {{ git = '{}' }} + b = '0.5.0' + + "#, + git.url() + ), + ) + .file("src/lib.rs", "") + .build(); + Package::new("b", "0.5.0").publish(); + + p.cargo("vendor --respect-source-config") + .with_stderr( + "\ +[UPDATING] [..] +[UPDATING] [..] +[DOWNLOADING] [..] +[DOWNLOADED] [..] +error: failed to sync + +Caused by: + found duplicate version of package `b v0.5.0` vendored from two sources: + +source 1: [..] +source 2: [..] +", + ) + .with_status(101) + .run(); +} + +#[cargo_test] +fn depend_on_vendor_dir_not_deleted() { + let p = project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + libc = "0.2.30" + "#, + ) + .file("src/lib.rs", "") + .build(); + + Package::new("libc", "0.2.30").publish(); + + p.cargo("vendor --respect-source-config").run(); + assert!(p.root().join("vendor/libc").is_dir()); + + p.change_file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + libc = "0.2.30" + + [patch.crates-io] + libc = { path = 'vendor/libc' } + "#, + ); + + p.cargo("vendor --respect-source-config").run(); + assert!(p.root().join("vendor/libc").is_dir()); +} diff --git a/tests/testsuite/verify_project.rs b/tests/testsuite/verify_project.rs new file mode 100644 index 00000000000..0bebf359def --- /dev/null +++ b/tests/testsuite/verify_project.rs @@ -0,0 +1,71 @@ +use crate::support::{basic_bin_manifest, main_file, project}; + +fn verify_project_success_output() -> String { + r#"{"success":"true"}"#.into() +} + +#[cargo_test] +fn cargo_verify_project_path_to_cargo_toml_relative() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("verify-project --manifest-path foo/Cargo.toml") + .cwd(p.root().parent().unwrap()) + .with_stdout(verify_project_success_output()) + .run(); +} + +#[cargo_test] +fn cargo_verify_project_path_to_cargo_toml_absolute() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("verify-project --manifest-path") + .arg(p.root().join("Cargo.toml")) + .cwd(p.root().parent().unwrap()) + .with_stdout(verify_project_success_output()) + .run(); +} + +#[cargo_test] +fn cargo_verify_project_cwd() { + let p = project() + .file("Cargo.toml", &basic_bin_manifest("foo")) + .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) + .build(); + + p.cargo("verify-project") + .with_stdout(verify_project_success_output()) + .run(); +} + +#[cargo_test] +fn cargo_verify_project_honours_unstable_features() { + let p = project() + .file( + "Cargo.toml", + r#" + cargo-features = ["test-dummy-unstable"] + + [package] + name = "foo" + version = "0.0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + p.cargo("verify-project") + .masquerade_as_nightly_cargo() + .with_stdout(verify_project_success_output()) + .run(); + + p.cargo("verify-project") + .with_status(1) + .with_stdout(r#"{"invalid":"failed to parse manifest at `[CWD]/Cargo.toml`"}"#) + .run(); +} diff --git a/tests/testsuite/version.rs b/tests/testsuite/version.rs new file mode 100644 index 00000000000..7a04bde0405 --- /dev/null +++ b/tests/testsuite/version.rs @@ -0,0 +1,42 @@ +use crate::support::project; +use cargo; + +#[cargo_test] +fn simple() { + let p = project().build(); + + p.cargo("version") + .with_stdout(&format!("{}\n", cargo::version())) + .run(); + + 
p.cargo("--version") + .with_stdout(&format!("{}\n", cargo::version())) + .run(); +} + +#[cargo_test] +#[cfg_attr(target_os = "windows", ignore)] +fn version_works_without_rustc() { + let p = project().build(); + p.cargo("version").env("PATH", "").run(); +} + +#[cargo_test] +fn version_works_with_bad_config() { + let p = project().file(".cargo/config", "this is not toml").build(); + p.cargo("version").run(); +} + +#[cargo_test] +fn version_works_with_bad_target_dir() { + let p = project() + .file( + ".cargo/config", + r#" + [build] + target-dir = 4 + "#, + ) + .build(); + p.cargo("version").run(); +} diff --git a/tests/testsuite/warn_on_failure.rs b/tests/testsuite/warn_on_failure.rs new file mode 100644 index 00000000000..bf662d81a77 --- /dev/null +++ b/tests/testsuite/warn_on_failure.rs @@ -0,0 +1,109 @@ +use crate::support::registry::Package; +use crate::support::{project, Project}; + +static WARNING1: &str = "Hello! I'm a warning. :)"; +static WARNING2: &str = "And one more!"; + +fn make_lib(lib_src: &str) { + Package::new("bar", "0.0.1") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + authors = [] + version = "0.0.1" + build = "build.rs" + "#, + ) + .file( + "build.rs", + &format!( + r#" + fn main() {{ + use std::io::Write; + println!("cargo:warning={{}}", "{}"); + println!("hidden stdout"); + write!(&mut ::std::io::stderr(), "hidden stderr"); + println!("cargo:warning={{}}", "{}"); + }} + "#, + WARNING1, WARNING2 + ), + ) + .file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src)) + .publish(); +} + +fn make_upstream(main_src: &str) -> Project { + project() + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("src/main.rs", &format!("fn main() {{ {} }}", main_src)) + .build() +} + +#[cargo_test] +fn no_warning_on_success() { + make_lib(""); + let upstream = make_upstream(""); + upstream + .cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] bar v0.0.1 ([..]) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn no_warning_on_bin_failure() { + make_lib(""); + let upstream = make_upstream("hi()"); + upstream + .cargo("build") + .with_status(101) + .with_stdout_does_not_contain("hidden stdout") + .with_stderr_does_not_contain("hidden stderr") + .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1)) + .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2)) + .with_stderr_contains("[UPDATING] `[..]` index") + .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])") + .with_stderr_contains("[COMPILING] bar v0.0.1") + .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") + .run(); +} + +#[cargo_test] +fn warning_on_lib_failure() { + make_lib("err()"); + let upstream = make_upstream(""); + upstream + .cargo("build") + .with_status(101) + .with_stdout_does_not_contain("hidden stdout") + .with_stderr_does_not_contain("hidden stderr") + .with_stderr_does_not_contain("[COMPILING] foo v0.0.1 ([..])") + .with_stderr_contains("[UPDATING] `[..]` index") + .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])") + .with_stderr_contains("[COMPILING] bar v0.0.1") + .with_stderr_contains(&format!("[WARNING] {}", WARNING1)) + .with_stderr_contains(&format!("[WARNING] {}", WARNING2)) + .run(); +} diff --git a/tests/testsuite/workspaces.rs b/tests/testsuite/workspaces.rs new file mode 100644 index 00000000000..35ec7a52e75 --- /dev/null +++ b/tests/testsuite/workspaces.rs @@ -0,0 +1,2175 @@ +use std::env; +use std::fs::{self, File}; +use std::io::{Read, Write}; + +use crate::support::registry::Package; +use crate::support::sleep_ms; +use crate::support::{basic_lib_manifest, basic_manifest, git, project}; + +#[cargo_test] +fn simple_explicit() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + assert!(!p.bin("bar").is_file()); + + p.cargo("build").cwd("bar").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("bar").is_file()); + + assert!(p.root().join("Cargo.lock").is_file()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); +} + +#[cargo_test] +fn simple_explicit_default_members() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + default-members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." 
+ "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.bin("bar").is_file()); + assert!(!p.bin("foo").is_file()); +} + +#[cargo_test] +fn inferred_root() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + assert!(!p.bin("bar").is_file()); + + p.cargo("build").cwd("bar").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("bar").is_file()); + + assert!(p.root().join("Cargo.lock").is_file()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); +} + +#[cargo_test] +fn inferred_path_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + assert!(!p.bin("bar").is_file()); + + p.cargo("build").cwd("bar").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("bar").is_file()); + + assert!(p.root().join("Cargo.lock").is_file()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); +} + +#[cargo_test] +fn transitive_path_dep() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "bar" } + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + baz = { path = "../baz" } + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", "") + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("baz/src/main.rs", "fn main() {}") + .file("baz/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + assert!(!p.bin("bar").is_file()); + assert!(!p.bin("baz").is_file()); + + p.cargo("build").cwd("bar").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("bar").is_file()); + assert!(!p.bin("baz").is_file()); + + p.cargo("build").cwd("baz").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("bar").is_file()); + assert!(p.bin("baz").is_file()); + + assert!(p.root().join("Cargo.lock").is_file()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); + assert!(!p.root().join("baz/Cargo.lock").is_file()); +} + +#[cargo_test] +fn parent_pointer_works() { + let p = project() + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "../bar" } + + [workspace] + "#, + ) + .file("foo/src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../foo" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file("bar/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").cwd("foo").run(); + p.cargo("build").cwd("bar").run(); + assert!(p.root().join("foo/Cargo.lock").is_file()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); +} + +#[cargo_test] +fn 
same_names_in_workspace() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: two packages named `foo` in this workspace: +- [..]Cargo.toml +- [..]Cargo.toml +", + ) + .run(); +} + +#[cargo_test] +fn parent_doesnt_point_to_child() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .cwd("bar") + .with_status(101) + .with_stderr( + "\ +error: current package believes it's in a workspace when it's not: +current: [..]Cargo.toml +workspace: [..]Cargo.toml + +this may be fixable [..] +[..] +", + ) + .run(); +} + +#[cargo_test] +fn invalid_parent_pointer() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "foo" + "#, + ) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to read `[..]Cargo.toml` + +Caused by: + [..] +", + ) + .run(); +} + +#[cargo_test] +fn invalid_members() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["foo"] + "#, + ) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: failed to read `[..]Cargo.toml` + +Caused by: + [..] +", + ) + .run(); +} + +#[cargo_test] +fn bare_workspace_ok() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#, + ) + .file("src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn two_roots() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [workspace] + members = [".."] + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: multiple workspace roots found in the same workspace: + [..] + [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn workspace_isnt_root() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .with_status(101) + .with_stderr("error: root of a workspace inferred but wasn't a root: [..]") + .run(); +} + +#[cargo_test] +fn dangling_member() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../baz" + "#, + ) + .file("bar/src/main.rs", "fn main() {}") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + workspace = "../baz" + "#, + ) + .file("baz/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: package `[..]` is a member of the wrong workspace +expected: [..] +actual: [..] +", + ) + .run(); +} + +#[cargo_test] +fn cycle() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + workspace = "bar" + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = ".." + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .with_status(101) + .with_stderr( + "[ERROR] root of a workspace inferred but wasn't a root: [..]/foo/bar/Cargo.toml", + ) + .run(); +} + +#[cargo_test] +fn share_dependencies() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "0.1" + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "< 0.1.5" + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.3").publish(); + Package::new("dep1", "0.1.8").publish(); + + p.cargo("build") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... +[DOWNLOADED] dep1 v0.1.3 ([..]) +[COMPILING] dep1 v0.1.3 +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn fetch_fetches_all() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "*" + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.3").publish(); + + p.cargo("fetch") + .with_stderr( + "\ +[UPDATING] `[..]` index +[DOWNLOADING] crates ... 
+[DOWNLOADED] dep1 v0.1.3 ([..]) +", + ) + .run(); +} + +#[cargo_test] +fn lock_works_for_everyone() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + dep2 = "0.1" + + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + dep1 = "0.1" + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + Package::new("dep1", "0.1.0").publish(); + Package::new("dep2", "0.1.0").publish(); + + p.cargo("generate-lockfile") + .with_stderr("[UPDATING] `[..]` index") + .run(); + + Package::new("dep1", "0.1.1").publish(); + Package::new("dep2", "0.1.1").publish(); + + p.cargo("build") + .with_stderr( + "\ +[DOWNLOADING] crates ... +[DOWNLOADED] dep2 v0.1.0 ([..]) +[COMPILING] dep2 v0.1.0 +[COMPILING] foo v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + p.cargo("build") + .cwd("bar") + .with_stderr( + "\ +[DOWNLOADING] crates ... +[DOWNLOADED] dep1 v0.1.0 ([..]) +[COMPILING] dep1 v0.1.0 +[COMPILING] bar v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); +} + +#[cargo_test] +fn virtual_works() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + p.cargo("build").cwd("bar").run(); + assert!(p.root().join("Cargo.lock").is_file()); + assert!(p.bin("bar").is_file()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); +} + +#[cargo_test] +fn explicit_package_argument_works_with_virtual_manifest() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + p.cargo("build --package bar").run(); + assert!(p.root().join("Cargo.lock").is_file()); + assert!(p.bin("bar").is_file()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); +} + +#[cargo_test] +fn virtual_misconfigure() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + p.cargo("build") + .cwd("bar") + .with_status(101) + .with_stderr( + "\ +error: current package believes it's in a workspace when it's not: +current: [CWD]/Cargo.toml +workspace: [..]Cargo.toml + +this may be fixable by adding `bar` to the `workspace.members` array of the \ +manifest located at: [..] +[..] 
+", + ) + .run(); +} + +#[cargo_test] +fn virtual_build_all_implied() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + p.cargo("build").run(); +} + +#[cargo_test] +fn virtual_default_members() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar", "baz"] + default-members = ["bar"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}") + .file("baz/src/main.rs", "fn main() {}"); + let p = p.build(); + p.cargo("build").run(); + assert!(p.bin("bar").is_file()); + assert!(!p.bin("baz").is_file()); +} + +#[cargo_test] +fn virtual_default_member_is_not_a_member() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["bar"] + default-members = ["something-else"] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: package `[..]something-else` is listed in workspace’s default-members \ +but is not a member. +", + ) + .run(); +} + +#[cargo_test] +fn virtual_build_no_members() { + let p = project().file( + "Cargo.toml", + r#" + [workspace] + "#, + ); + let p = p.build(); + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: manifest path `[..]` contains no package: The manifest is virtual, \ +and the workspace has no members. +", + ) + .run(); +} + +#[cargo_test] +fn include_virtual() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + [workspace] + members = ["bar"] + "#, + ) + .file("src/main.rs", "") + .file( + "bar/Cargo.toml", + r#" + [workspace] + "#, + ); + let p = p.build(); + p.cargo("build") + .with_status(101) + .with_stderr( + "\ +error: multiple workspace roots found in the same workspace: + [..] + [..] 
+", + ) + .run(); +} + +#[cargo_test] +fn members_include_path_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["p1"] + + [dependencies] + p3 = { path = "p3" } + "#, + ) + .file("src/lib.rs", "") + .file( + "p1/Cargo.toml", + r#" + [project] + name = "p1" + version = "0.1.0" + authors = [] + + [dependencies] + p2 = { path = "../p2" } + "#, + ) + .file("p1/src/lib.rs", "") + .file("p2/Cargo.toml", &basic_manifest("p2", "0.1.0")) + .file("p2/src/lib.rs", "") + .file("p3/Cargo.toml", &basic_manifest("p3", "0.1.0")) + .file("p3/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").cwd("p1").run(); + p.cargo("build").cwd("p2").run(); + p.cargo("build").cwd("p3").run(); + p.cargo("build").run(); + + assert!(p.root().join("target").is_dir()); + assert!(!p.root().join("p1/target").is_dir()); + assert!(!p.root().join("p2/target").is_dir()); + assert!(!p.root().join("p3/target").is_dir()); +} + +#[cargo_test] +fn new_warns_you_this_will_not_work() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + "#, + ) + .file("src/lib.rs", ""); + let p = p.build(); + + p.cargo("new --lib bar") + .env("USER", "foo") + .with_stderr( + "\ +warning: compiling this new crate may not work due to invalid workspace \ +configuration + +current package believes it's in a workspace when it's not: +current: [..] +workspace: [..] + +this may be fixable by ensuring that this crate is depended on by the workspace \ +root: [..] +[..] +[CREATED] library `bar` package +", + ) + .run(); +} + +#[cargo_test] +fn new_warning_with_corrupt_ws() { + let p = project().file("Cargo.toml", "asdf").build(); + p.cargo("new bar") + .env("USER", "foo") + .with_stderr( + "\ +[WARNING] compiling this new crate may not work due to invalid workspace configuration + +failed to parse manifest at `[..]foo/Cargo.toml` +Caused by: + could not parse input as TOML +Caused by: + expected an equals, found eof at line 1 + Created binary (application) `bar` package +", + ) + .run(); +} + +#[cargo_test] +fn lock_doesnt_change_depending_on_crate() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ['baz'] + + [dependencies] + foo = "*" + "#, + ) + .file("src/lib.rs", "") + .file( + "baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + + [dependencies] + bar = "*" + "#, + ) + .file("baz/src/lib.rs", ""); + let p = p.build(); + + Package::new("foo", "1.0.0").publish(); + Package::new("bar", "1.0.0").publish(); + + p.cargo("build").run(); + + let mut lockfile = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile)); + + p.cargo("build").cwd("baz").run(); + + let mut lockfile2 = String::new(); + t!(t!(File::open(p.root().join("Cargo.lock"))).read_to_string(&mut lockfile2)); + + assert_eq!(lockfile, lockfile2); +} + +#[cargo_test] +fn rebuild_please() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ['lib', 'bin'] + "#, + ) + .file("lib/Cargo.toml", &basic_manifest("lib", "0.1.0")) + .file( + "lib/src/lib.rs", + r#" + pub fn foo() -> u32 { 0 } + "#, + ) + .file( + "bin/Cargo.toml", + r#" + [package] + name = "bin" + version = "0.1.0" + + [dependencies] + lib = { path = "../lib" } + "#, + ) + .file( + "bin/src/main.rs", + r#" + extern crate lib; + + fn main() { + assert_eq!(lib::foo(), 
0); + } + "#, + ); + let p = p.build(); + + p.cargo("run").cwd("bin").run(); + + sleep_ms(1000); + + t!(t!(File::create(p.root().join("lib/src/lib.rs"))) + .write_all(br#"pub fn foo() -> u32 { 1 }"#)); + + p.cargo("build").cwd("lib").run(); + + p.cargo("run") + .cwd("bin") + .with_status(101) + .with_stderr_contains("[..]assertion[..]") + .run(); +} + +#[cargo_test] +fn workspace_in_git() { + let git_project = git::new("dep1", |project| { + project + .file( + "Cargo.toml", + r#" + [workspace] + members = ["foo"] + "#, + ) + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", "") + }) + .unwrap(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "lib" + version = "0.1.0" + + [dependencies.foo] + git = '{}' + "#, + git_project.url() + ), + ) + .file( + "src/lib.rs", + r#" + pub fn foo() -> u32 { 0 } + "#, + ); + let p = p.build(); + + p.cargo("build").run(); +} + +#[cargo_test] +fn lockfile_can_specify_nonexistant_members() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a"] + "#, + ) + .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) + .file("a/src/main.rs", "fn main() {}") + .file( + "Cargo.lock", + r#" + [[package]] + name = "a" + version = "0.1.0" + + [[package]] + name = "b" + version = "0.1.0" + "#, + ); + + let p = p.build(); + + p.cargo("build").cwd("a").run(); +} + +#[cargo_test] +fn you_cannot_generate_lockfile_for_empty_workspaces() { + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + "#, + ) + .file("bar/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("update") + .with_status(101) + .with_stderr("error: you can't generate a lockfile for an empty workspace.") + .run(); +} + +#[cargo_test] +fn workspace_with_transitive_dev_deps() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.5.0" + authors = ["mbrubeck@example.com"] + + [dependencies.bar] + path = "bar" + + [workspace] + "#, + ) + .file("src/main.rs", r#"fn main() {}"#) + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.5.0" + authors = ["mbrubeck@example.com"] + + [dev-dependencies.baz] + path = "../baz" + "#, + ) + .file( + "bar/src/lib.rs", + r#" + pub fn init() {} + + #[cfg(test)] + + #[test] + fn test() { + extern crate baz; + baz::do_stuff(); + } + "#, + ) + .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) + .file("baz/src/lib.rs", r#"pub fn do_stuff() {}"#); + let p = p.build(); + + p.cargo("test -p bar").run(); +} + +#[cargo_test] +fn error_if_parent_cargo_toml_is_invalid() { + let p = project() + .file("Cargo.toml", "Totally not a TOML file") + .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build") + .cwd("bar") + .with_status(101) + .with_stderr_contains("[ERROR] failed to parse manifest at `[..]`") + .run(); +} + +#[cargo_test] +fn relative_path_for_member_works() { + let p = project() + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["../bar"] + "#, + ) + .file("foo/src/main.rs", "fn main() {}") + .file( + "bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../foo" + "#, + ) + .file("bar/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build").cwd("foo").run(); + p.cargo("build").cwd("bar").run(); +} + +#[cargo_test] +fn 
relative_path_for_root_works() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + + [dependencies] + subproj = { path = "./subproj" } + "#, + ) + .file("src/main.rs", "fn main() {}") + .file("subproj/Cargo.toml", &basic_manifest("subproj", "0.1.0")) + .file("subproj/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build --manifest-path ./Cargo.toml").run(); + + p.cargo("build --manifest-path ../Cargo.toml") + .cwd("subproj") + .run(); +} + +#[cargo_test] +fn path_dep_outside_workspace_is_not_member() { + let p = project() + .no_manifest() + .file( + "ws/Cargo.toml", + r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "../foo" } + + [workspace] + "#, + ) + .file("ws/src/lib.rs", r"extern crate foo;") + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").cwd("ws").run(); +} + +#[cargo_test] +fn test_in_and_out_of_workspace() { + let p = project() + .no_manifest() + .file( + "ws/Cargo.toml", + r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "../foo" } + + [workspace] + members = [ "../bar" ] + "#, + ) + .file( + "ws/src/lib.rs", + r"extern crate foo; pub fn f() { foo::f() }", + ) + .file( + "foo/Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "../bar" } + "#, + ) + .file( + "foo/src/lib.rs", + "extern crate bar; pub fn f() { bar::f() }", + ) + .file( + "bar/Cargo.toml", + r#" + [project] + workspace = "../ws" + name = "bar" + version = "0.1.0" + authors = [] + "#, + ) + .file("bar/src/lib.rs", "pub fn f() { }"); + let p = p.build(); + + p.cargo("build").cwd("ws").run(); + + assert!(p.root().join("ws/Cargo.lock").is_file()); + assert!(p.root().join("ws/target").is_dir()); + assert!(!p.root().join("foo/Cargo.lock").is_file()); + assert!(!p.root().join("foo/target").is_dir()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); + assert!(!p.root().join("bar/target").is_dir()); + + p.cargo("build").cwd("foo").run(); + assert!(p.root().join("foo/Cargo.lock").is_file()); + assert!(p.root().join("foo/target").is_dir()); + assert!(!p.root().join("bar/Cargo.lock").is_file()); + assert!(!p.root().join("bar/target").is_dir()); +} + +#[cargo_test] +fn test_path_dependency_under_member() { + let p = project() + .file( + "ws/Cargo.toml", + r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "../foo" } + + [workspace] + "#, + ) + .file( + "ws/src/lib.rs", + r"extern crate foo; pub fn f() { foo::f() }", + ) + .file( + "foo/Cargo.toml", + r#" + [project] + workspace = "../ws" + name = "foo" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "./bar" } + "#, + ) + .file( + "foo/src/lib.rs", + "extern crate bar; pub fn f() { bar::f() }", + ) + .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("foo/bar/src/lib.rs", "pub fn f() { }"); + let p = p.build(); + + p.cargo("build").cwd("ws").run(); + + assert!(!p.root().join("foo/bar/Cargo.lock").is_file()); + assert!(!p.root().join("foo/bar/target").is_dir()); + + p.cargo("build").cwd("foo/bar").run(); + + assert!(!p.root().join("foo/bar/Cargo.lock").is_file()); + assert!(!p.root().join("foo/bar/target").is_dir()); +} + +#[cargo_test] +fn excluded_simple() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "ws" + 
version = "0.1.0" + authors = [] + + [workspace] + exclude = ["foo"] + "#, + ) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.root().join("target").is_dir()); + p.cargo("build").cwd("foo").run(); + assert!(p.root().join("foo/target").is_dir()); +} + +#[cargo_test] +fn exclude_members_preferred() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [workspace] + members = ["foo/bar"] + exclude = ["foo"] + "#, + ) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", "") + .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("foo/bar/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.root().join("target").is_dir()); + p.cargo("build").cwd("foo").run(); + assert!(p.root().join("foo/target").is_dir()); + p.cargo("build").cwd("foo/bar").run(); + assert!(!p.root().join("foo/bar/target").is_dir()); +} + +#[cargo_test] +fn exclude_but_also_depend() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "ws" + version = "0.1.0" + authors = [] + + [dependencies] + bar = { path = "foo/bar" } + + [workspace] + exclude = ["foo"] + "#, + ) + .file("src/lib.rs", "") + .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) + .file("foo/src/lib.rs", "") + .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) + .file("foo/bar/src/lib.rs", ""); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.root().join("target").is_dir()); + p.cargo("build").cwd("foo").run(); + assert!(p.root().join("foo/target").is_dir()); + p.cargo("build").cwd("foo/bar").run(); + assert!(p.root().join("foo/bar/target").is_dir()); +} + +#[cargo_test] +fn glob_syntax() { + let p = project() + .file( + "Cargo.toml", + r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + + [workspace] + members = ["crates/*"] + exclude = ["crates/qux"] + "#, + ) + .file("src/main.rs", "fn main() {}") + .file( + "crates/bar/Cargo.toml", + r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + workspace = "../.." + "#, + ) + .file("crates/bar/src/main.rs", "fn main() {}") + .file( + "crates/baz/Cargo.toml", + r#" + [project] + name = "baz" + version = "0.1.0" + authors = [] + workspace = "../.." + "#, + ) + .file("crates/baz/src/main.rs", "fn main() {}") + .file( + "crates/qux/Cargo.toml", + r#" + [project] + name = "qux" + version = "0.1.0" + authors = [] + "#, + ) + .file("crates/qux/src/main.rs", "fn main() {}"); + let p = p.build(); + + p.cargo("build").run(); + assert!(p.bin("foo").is_file()); + assert!(!p.bin("bar").is_file()); + assert!(!p.bin("baz").is_file()); + + p.cargo("build").cwd("crates/bar").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("bar").is_file()); + + p.cargo("build").cwd("crates/baz").run(); + assert!(p.bin("foo").is_file()); + assert!(p.bin("baz").is_file()); + + p.cargo("build").cwd("crates/qux").run(); + assert!(!p.bin("qux").is_file()); + + assert!(p.root().join("Cargo.lock").is_file()); + assert!(!p.root().join("crates/bar/Cargo.lock").is_file()); + assert!(!p.root().join("crates/baz/Cargo.lock").is_file()); + assert!(p.root().join("crates/qux/Cargo.lock").is_file()); +} + +/*FIXME: This fails because of how workspace.exclude and workspace.members are working. 
+
+/* FIXME: This fails because of how workspace.exclude and workspace.members interact.
+
+#[cargo_test]
+fn glob_syntax_2() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["crates/b*"]
+            exclude = ["crates/q*"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/bar/src/main.rs", "fn main() {}")
+        .file("crates/baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../.."
+        "#)
+        .file("crates/baz/src/main.rs", "fn main() {}")
+        .file("crates/qux/Cargo.toml", r#"
+            [project]
+            name = "qux"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("crates/qux/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build").run();
+    assert!(p.bin("foo").is_file());
+    assert!(!p.bin("bar").is_file());
+    assert!(!p.bin("baz").is_file());
+
+    p.cargo("build").cwd("crates/bar").run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+
+    p.cargo("build").cwd("crates/baz").run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("baz").is_file());
+
+    p.cargo("build").cwd("crates/qux").run();
+    assert!(!p.bin("qux").is_file());
+
+    assert!(p.root().join("Cargo.lock").is_file());
+    assert!(!p.root().join("crates/bar/Cargo.lock").is_file());
+    assert!(!p.root().join("crates/baz/Cargo.lock").is_file());
+    assert!(p.root().join("crates/qux/Cargo.lock").is_file());
+}
+*/
+
+#[cargo_test]
+fn glob_syntax_invalid_members() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.1.0"
+                authors = []
+
+                [workspace]
+                members = ["crates/*"]
+            "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+",
+        )
+        .run();
+}
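+
+// The passing counterpart to `glob_syntax_invalid_members`, as a sketch
+// (hypothetical test name): once every directory matched by the glob has a
+// manifest, the workspace loads cleanly and `build --all` compiles every
+// matched member.
+#[cargo_test]
+fn glob_syntax_valid_members_sketch() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [project]
+                name = "foo"
+                version = "0.1.0"
+                authors = []
+
+                [workspace]
+                members = ["crates/*"]
+            "#,
+        )
+        .file("src/main.rs", "fn main() {}")
+        .file("crates/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("crates/bar/src/main.rs", "fn main() {}");
+    let p = p.build();
+
+    p.cargo("build --all").run();
+    assert!(p.bin("foo").is_file());
+    assert!(p.bin("bar").is_file());
+}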
+", + ) + .run(); + assert!(p.bin("caller1").is_file()); + assert!(p.bin("caller2").is_file()); + + // Build `caller1`. Should build the dep library. Because the features + // are different than the full workspace, it rebuilds. + // Ideally once we solve rust-lang/cargo#3620, then a single Cargo build at the top level + // will be enough. + p.cargo("build") + .cwd("caller1") + .with_stderr( + "\ +[..]Compiling feat_lib v0.1.0 ([..]) +[..]Compiling caller1 v0.1.0 ([..]) +[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] +", + ) + .run(); + + // Alternate building `caller2`/`caller1` a few times, just to make sure + // features are being built separately. Should not rebuild anything. + p.cargo("build") + .cwd("caller2") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + p.cargo("build") + .cwd("caller1") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); + p.cargo("build") + .cwd("caller2") + .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") + .run(); +} + +#[cargo_test] +fn dont_recurse_out_of_cargo_home() { + let git_project = git::new("dep", |project| { + project + .file("Cargo.toml", &basic_manifest("dep", "0.1.0")) + .file("src/lib.rs", "") + .file( + "build.rs", + r#" + use std::env; + use std::path::Path; + use std::process::{self, Command}; + + fn main() { + let cargo = env::var_os("CARGO").unwrap(); + let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap(); + let output = Command::new(cargo) + .args(&["metadata", "--format-version", "1", "--manifest-path"]) + .arg(&Path::new(&cargo_manifest_dir).join("Cargo.toml")) + .output() + .unwrap(); + if !output.status.success() { + eprintln!("{}", String::from_utf8(output.stderr).unwrap()); + process::exit(1); + } + } + "#, + ) + }) + .unwrap(); + let p = project() + .file( + "Cargo.toml", + &format!( + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies.dep] + git = "{}" + + [workspace] + "#, + git_project.url() + ), + ) + .file("src/lib.rs", ""); + let p = p.build(); + + p.cargo("build") + .env("CARGO_HOME", p.root().join(".cargo")) + .run(); +} + +// FIXME: this fails because of how workspace.exclude and workspace.members are working. 
+
+// FIXME: This fails because of how workspace.exclude and workspace.members interact.
+/*
+#[cargo_test]
+fn include_and_exclude() {
+    let p = project()
+        .file("Cargo.toml", r#"
+            [workspace]
+            members = ["foo"]
+            exclude = ["foo/bar"]
+        "#)
+        .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0"))
+        .file("foo/src/lib.rs", "")
+        .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
+        .file("foo/bar/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").cwd("foo").run();
+    assert!(p.root().join("target").is_dir());
+    assert!(!p.root().join("foo/target").is_dir());
+    p.cargo("build").cwd("foo/bar").run();
+    assert!(p.root().join("foo/bar/target").is_dir());
+}
+*/
+
+#[cargo_test]
+fn cargo_home_at_root_works() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [workspace]
+                members = ["a"]
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file("a/Cargo.toml", &basic_manifest("a", "0.1.0"))
+        .file("a/src/lib.rs", "");
+    let p = p.build();
+
+    p.cargo("build").run();
+    p.cargo("build --frozen").env("CARGO_HOME", p.root()).run();
+}
+
+#[cargo_test]
+fn relative_rustc() {
+    let p = project()
+        .file(
+            "src/main.rs",
+            r#"
+                use std::process::Command;
+                use std::env;
+
+                fn main() {
+                    let mut cmd = Command::new("rustc");
+                    for arg in env::args_os().skip(1) {
+                        cmd.arg(arg);
+                    }
+                    std::process::exit(cmd.status().unwrap().code().unwrap());
+                }
+            "#,
+        )
+        .build();
+    p.cargo("build").run();
+
+    let src = p
+        .root()
+        .join("target/debug/foo")
+        .with_extension(env::consts::EXE_EXTENSION);
+
+    Package::new("a", "0.1.0").publish();
+
+    let p = project()
+        .at("lib")
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "lib"
+                version = "0.1.0"
+
+                [dependencies]
+                a = "0.1"
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    fs::copy(&src, p.root().join(src.file_name().unwrap())).unwrap();
+
+    let file = format!("./foo{}", env::consts::EXE_SUFFIX);
+    p.cargo("build").env("RUSTC", &file).run();
+}
+
+#[cargo_test]
+fn ws_rustc_err() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [workspace]
+                members = ["a"]
+            "#,
+        )
+        .file("a/Cargo.toml", &basic_lib_manifest("a"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("rustc")
+        .with_status(101)
+        .with_stderr("[ERROR] [..]against an actual package[..]")
+        .run();
+
+    p.cargo("rustdoc")
+        .with_status(101)
+        .with_stderr("[ERROR] [..]against an actual package[..]")
+        .run();
+}
+
+#[cargo_test]
+fn ws_err_unused() {
+    for key in &[
+        "[lib]",
+        "[[bin]]",
+        "[[example]]",
+        "[[test]]",
+        "[[bench]]",
+        "[dependencies]",
+        "[dev-dependencies]",
+        "[build-dependencies]",
+        "[features]",
+        "[target]",
+        "[badges]",
+    ] {
+        let p = project()
+            .file(
+                "Cargo.toml",
+                &format!(
+                    r#"
+                        [workspace]
+                        members = ["a"]
+
+                        {}
+                    "#,
+                    key
+                ),
+            )
+            .file("a/Cargo.toml", &basic_lib_manifest("a"))
+            .file("a/src/lib.rs", "")
+            .build();
+        p.cargo("check")
+            .with_status(101)
+            .with_stderr(&format!(
+                "\
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+  virtual manifests do not specify {}
+",
+                key
+            ))
+            .run();
+    }
+}
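+
+// By contrast, a sketch of a key that *is* accepted in a virtual manifest
+// (hypothetical test name): profiles belong at the workspace root, which is
+// exactly where the warning in `ws_warn_unused` below points users.
+#[cargo_test]
+fn ws_root_profile_accepted_sketch() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [workspace]
+                members = ["a"]
+
+                [profile.dev]
+                opt-level = 1
+            "#,
+        )
+        .file("a/Cargo.toml", &basic_lib_manifest("a"))
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("check").run();
+}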
+
+#[cargo_test]
+fn ws_warn_unused() {
+    for (key, name) in &[
+        ("[profile.dev]\nopt-level = 1", "profiles"),
+        ("[replace]\n\"bar:0.1.0\" = { path = \"bar\" }", "replace"),
+        ("[patch.crates-io]\nbar = { path = \"bar\" }", "patch"),
+    ] {
+        let p = project()
+            .file(
+                "Cargo.toml",
+                r#"
+                    [workspace]
+                    members = ["a"]
+                "#,
+            )
+            .file(
+                "a/Cargo.toml",
+                &format!(
+                    r#"
+                        [package]
+                        name = "a"
+                        version = "0.1.0"
+
+                        {}
+                    "#,
+                    key
+                ),
+            )
+            .file("a/src/lib.rs", "")
+            .build();
+        p.cargo("check")
+            .with_stderr_contains(&format!(
+                "\
+[WARNING] {} for the non root package will be ignored, specify {} at the workspace root:
+package: [..]/foo/a/Cargo.toml
+workspace: [..]/foo/Cargo.toml
+",
+                name, name
+            ))
+            .run();
+    }
+}
+
+#[cargo_test]
+fn ws_warn_path() {
+    // Warnings include the path to the offending manifest.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [workspace]
+                members = ["a"]
+            "#,
+        )
+        .file(
+            "a/Cargo.toml",
+            r#"
+                cargo-features = ["edition"]
+                [package]
+                name = "foo"
+                version = "0.1.0"
+            "#,
+        )
+        .file("a/src/lib.rs", "")
+        .build();
+
+    p.cargo("check")
+        .with_stderr_contains("[WARNING] [..]/foo/a/Cargo.toml: the cargo feature `edition`[..]")
+        .run();
+}
+
+#[cargo_test]
+fn invalid_missing() {
+    // A manifest with a missing path dependency reports the full chain of
+    // causes, even with `-q`.
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [dependencies]
+                x = { path = 'x' }
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build -q")
+        .with_status(101)
+        .with_stderr(
+            "\
+error: [..]
+
+Caused by:
+  [..]
+
+Caused by:
+  [..]
+
+Caused by:
+  [..]",
+        )
+        .run();
+}
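+
+// The passing counterpart as a sketch (hypothetical test name): once the `x`
+// path dependency actually exists on disk, the same manifest builds cleanly.
+#[cargo_test]
+fn valid_path_dep_sketch() {
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+                [package]
+                name = "foo"
+                version = "0.1.0"
+
+                [dependencies]
+                x = { path = 'x' }
+            "#,
+        )
+        .file("src/lib.rs", "")
+        .file("x/Cargo.toml", &basic_manifest("x", "0.1.0"))
+        .file("x/src/lib.rs", "")
+        .build();
+
+    p.cargo("build -q").run();
+}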