diff --git a/.travis.yml b/.travis.yml
index 731d1a5f272..9046991de40 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,49 +1,153 @@
 language: rust
-rust:
-  - stable
-  - beta
-  - nightly
-sudo: false
+rust: stable
+sudo: required
+dist: trusty
+os: linux
+services:
+  - docker
+
+matrix:
+  include:
+    # stable linux builds, tested
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+           IMAGE=dist
+           MAKE_TARGETS="test distcheck doc install uninstall"
+    - env: TARGET=i686-unknown-linux-gnu
+           IMAGE=dist
+           MAKE_TARGETS=test-unit-i686-unknown-linux-gnu
+           CFG_DISABLE_CROSS_TESTS=1
+
+    # stable osx builds, tested
+    - env: TARGET=x86_64-apple-darwin
+           ALT=i686-apple-darwin
+           MAKE_TARGETS="test distcheck doc install uninstall"
+           MACOSX_DEPLOYMENT_TARGET=10.7
+      os: osx
+    - env: TARGET=i686-apple-darwin
+           MAKE_TARGETS=test
+           MACOSX_DEPLOYMENT_TARGET=10.7
+           CFG_DISABLE_CROSS_TESTS=1
+      os: osx
+      install: brew uninstall openssl && brew install openssl --universal --without-test
+
+    # stable musl target, tested
+    - env: TARGET=x86_64-unknown-linux-musl
+           IMAGE=x86_64-musl
+           CFG_DISABLE_CROSS_TESTS=1
+           MAKE_TARGETS=test-unit-$TARGET
+
+    # cross compiled targets
+    - env: TARGET=arm-unknown-linux-gnueabi
+           IMAGE=cross
+    - env: TARGET=arm-unknown-linux-gnueabihf
+           IMAGE=cross
+    - env: TARGET=armv7-unknown-linux-gnueabihf
+           IMAGE=cross
+    - env: TARGET=aarch64-unknown-linux-gnu
+           IMAGE=cross
+    - env: TARGET=i686-unknown-freebsd
+           IMAGE=cross
+    - env: TARGET=x86_64-unknown-freebsd
+           IMAGE=cross
+    - env: TARGET=x86_64-unknown-netbsd
+           IMAGE=cross
+    - env: TARGET=mips-unknown-linux-gnu
+           IMAGE=cross
+    - env: TARGET=mipsel-unknown-linux-gnu
+           IMAGE=cross
+    - env: TARGET=mips64-unknown-linux-gnuabi64
+           IMAGE=cross
+      rust: nightly
+    - env: TARGET=mips64el-unknown-linux-gnuabi64
+           IMAGE=cross
+      rust: nightly
+    - env: TARGET=s390x-unknown-linux-gnu
+           IMAGE=cross
+      rust: nightly
+    - env: TARGET=powerpc-unknown-linux-gnu
+           IMAGE=cross
+      rust: beta
+    - env: TARGET=powerpc64-unknown-linux-gnu
+           IMAGE=cross
+      rust: beta
+    - env: TARGET=powerpc64le-unknown-linux-gnu
+           IMAGE=cross
+      rust: beta
+
+    # beta/nightly builds
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+           IMAGE=dist
+           MAKE_TARGETS="test distcheck doc install uninstall"
+           DEPLOY=0
+      rust: beta
+    - env: TARGET=x86_64-unknown-linux-gnu
+           ALT=i686-unknown-linux-gnu
+           IMAGE=dist
+           MAKE_TARGETS="test distcheck doc install uninstall"
+           DEPLOY=0
+      rust: nightly
+
+  exclude:
+    - rust: stable
+
+before_script:
+  - curl https://static.rust-lang.org/rustup.sh |
+    sh -s -- --add-target=$TARGET --disable-sudo -y --prefix=`rustc --print sysroot`
+  - if [ ! -z "$ALT" ]; then
+      curl https://static.rust-lang.org/rustup.sh |
+      sh -s -- --add-target=$ALT --disable-sudo -y --prefix=`rustc --print sysroot`;
+    fi
 script:
-  - ./configure --prefix=$HOME/cargo-install --disable-cross-tests --disable-optimize
-  - make
-  - make test
-  - make distcheck
-  - make doc
-  - make install
-  - make uninstall
+  - if [ "$TRAVIS_OS_NAME" = "osx" ]; then
+      SRC=. src/ci/run.sh $TARGET;
+    else
+      src/ci/docker/run.sh $IMAGE $TARGET;
+    fi

 after_success: |
   [ $TRAVIS_BRANCH = master ] &&
   [ $TRAVIS_PULL_REQUEST = false ] &&
   [ $(uname -s) = Linux ] &&
   pip install ghp-import --user $USER &&
   $HOME/.local/bin/ghp-import -n target/doc &&
-  git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
+  git push -qf https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
+
 env:
   global:
-    # apparently we use too much memory and if there's more than one rustc then
-    # when compiling Cargo's unit tests some compilers will be randomly kill
-    # -9'd
-    - CARGOFLAGS=-j1
-    - secure: scGpeetUfba5RWyuS4yt10bPoFAI9wpHEReIFqEx7eH5vr2Anajk6+70jW6GdrWVdUvdINiArlQ3An2DeB9vEUWcBjw8WvuPtOH0tDMoSsuVloPlFD8yn1Ac0Bx9getAO5ofxqtoNg+OV4MDVuGabEesqAOWqURNrBC7XK+ntC8=
+    - DEPLOY=1
+    - secure: LB2o9UL90Z4CVOLVQsTbZr7ZBLA1dCLxFODuCkPkbdqG3Kl5z1yMIPMRvSbjp9KwBlIgm+Mg0R1iqphKVq+rVP5zo96K4+kEQMG+zWsPb23ZKTxiL8MK5VgCZ7s9AONCvNeCTCNAG3EyeciFr5Zr9eygVCfo0WF6JsPujYYQZx0=

-matrix:
-  include:
-    - os: osx
-      rust: stable
-      before_install:
-        - export OPENSSL_INCLUDE_DIR=`brew --prefix openssl`/include
-        - export OPENSSL_LIB_DIR=`brew --prefix openssl`/lib
+notifications:
+  email:
+    on_success: never

 branches:
   only:
-    - master
-
-addons:
-  apt:
-    sources:
-      - kalakris-cmake
-    packages:
-      - cmake
-      - g++-multilib
-      - lib32stdc++6
+    - master
+    - auto-cargo
+
+before_deploy:
+  - mkdir -p deploy/$TRAVIS_COMMIT
+  - cp target/$TARGET/release/dist/cargo-nightly-$TARGET.tar.gz
+       deploy/$TRAVIS_COMMIT
+
+deploy:
+  - provider: s3
+    bucket: rust-lang-cargo-dev
+    skip_cleanup: true
+    local_dir: deploy
+    upload_dir: cargo-master
+    acl: public_read
+    region: us-west-1
+    access_key_id: AKIAIWZDM2B2IJOWBGTA
+    secret_access_key:
+      secure: NB9b/MhIDiv8OtNiN/sHaFgA3xG2fa7MGuQQKJNj80ktvgByzDm5UPNyNeoYx9SmJ3jOWobgcPVaoUd2S+6XgO3bMBqm7sM/oMeE0KdqToh6+V2bKfyRF2U5fm697LEGepPIBYqMLDg4nr/dbknbKltzp6dAfJRyy22Nb721zPQ=
+    on:
+      branch: auto-cargo
+      condition: $DEPLOY = 1
+
+cache:
+  directories:
+    - $HOME/.cargo
+    - target/openssl
diff --git a/Cargo.lock b/Cargo.lock
index 9591597eb8e..9853a26f1ce 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,38 +1,38 @@
 [root]
 name = "cargo"
-version = "0.14.0"
+version = "0.15.0"
 dependencies = [
  "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "cargotest 0.1.0",
  "crates-io 0.4.0",
- "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "curl 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "docopt 0.6.82 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "docopt 0.6.86 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "fs2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2-curl 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs2 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.0
(registry+https://github.com/rust-lang/crates.io-index)", + "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -53,11 +53,6 @@ dependencies = [ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "bitflags" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "bitflags" version = "0.7.0" @@ -73,19 +68,19 @@ name = "cargotest" version = "0.1.0" dependencies = [ "bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "cargo 0.14.0", + "cargo 0.15.0", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.2.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -96,68 +91,69 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "cmake" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "crates-io" version = "0.4.0" dependencies = [ - "curl 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "crossbeam" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "curl" -version = "0.3.9" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl-sys 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" -version = "0.2.4" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "docopt" -version = "0.6.82" +version = "0.6.86" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", - "strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", + "strsim 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "env_logger" 
-version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -179,7 +175,7 @@ dependencies = [ [[package]] name = "fs2" -version = "0.2.5" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -189,7 +185,7 @@ dependencies = [ [[package]] name = "gcc" -version = "0.3.35" +version = "0.3.38" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -203,24 +199,26 @@ dependencies = [ [[package]] name = "git2" -version = "0.4.4" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bitflags 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libgit2-sys 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libgit2-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "git2-curl" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "git2 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "git2 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -234,7 +232,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -242,7 +240,7 @@ name = "idna" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -258,7 +256,7 @@ dependencies = [ [[package]] name = "lazy_static" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -268,44 +266,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libgit2-sys" -version = "0.4.5" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - 
"cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libssh2-sys 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", + "libssh2-sys 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "libressl-pnacl-sys" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "pnacl-build-helper 1.4.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "libssh2-sys" -version = "0.1.39" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libz-sys 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", + "libz-sys 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libz-sys" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -317,7 +308,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "matches" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -333,7 +324,7 @@ name = "miniz-sys" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -381,7 +372,7 @@ dependencies = [ "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -390,7 +381,7 @@ version = "0.1.35" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -418,7 +409,7 @@ dependencies = [ "num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -428,7 +419,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "num_cpus" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", @@ -436,52 +427,36 @@ dependencies = [ [[package]] name = "openssl" -version = "0.7.14" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys-extras 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "openssl-probe" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "openssl-sys" -version = "0.7.14" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "libressl-pnacl-sys 2.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "openssl-sys-extras" -version = "0.7.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "pkg-config" version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "pnacl-build-helper" -version = "1.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "psapi-sys" version = "0.1.0" @@ -501,24 +476,24 @@ dependencies = [ [[package]] name = "regex" -version = "0.1.77" +version = "0.1.80" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "aho-corasick 0.5.3 
(registry+https://github.com/rust-lang/crates.io-index)", "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "regex-syntax" -version = "0.3.5" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "rustc-serialize" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -534,13 +509,13 @@ name = "semver-parser" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "strsim" -version = "0.3.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -580,7 +555,7 @@ dependencies = [ [[package]] name = "thread_local" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -588,10 +563,10 @@ dependencies = [ [[package]] name = "toml" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -599,7 +574,7 @@ name = "unicode-bidi" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -609,11 +584,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "url" -version = "1.2.0" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -652,35 +627,33 @@ dependencies = [ [metadata] "checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a" "checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66" -"checksum bitflags 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2a6577517ecd0ee0934f48a7295a89aaef3e6dfafeac404f94c0b3448518ddfe" "checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" "checksum bufstream 0.1.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "7b48dbe2ff0e98fa2f03377d204a9637d3c9816cd431bfe05a8abbd0ea11d074" "checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c" -"checksum cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "dfcf5bcece56ef953b8ea042509e9dcbdfe97820b7e20d86beb53df30ed94978" -"checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc" -"checksum curl 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "faf54d927c752b092d3e99ea227d9c7c9b4a3e885a3368ac9bfa28958f215100" -"checksum curl-sys 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4f198d10378a3bc1f1b0e3bc3a2de5c9bb9e08938460dec57ba6667d9a65fbc3" -"checksum docopt 0.6.82 (registry+https://github.com/rust-lang/crates.io-index)" = "8f20016093b4e545dccf6ad4a01099de0b695f9bc99b08210e68f6425db2d37d" -"checksum env_logger 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "82dcb9ceed3868a03b335657b85a159736c961900f7e7747d3b0b97b9ccb5ccb" +"checksum cmake 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "0e5bcf27e097a184c1df4437654ed98df3d7a516e8508a6ba45d8b092bbdf283" +"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97" +"checksum curl 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8fd5a1fdcebdb1a59578c5583e66ffed2d13850eac4f51ff730edf6dd6111eac" +"checksum curl-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8e0016cc7b33b00fb7e7f6a314d8ee40748b13f377832ed9ff9e59dbb7f7ad27" +"checksum docopt 0.6.86 (registry+https://github.com/rust-lang/crates.io-index)" = "4a7ef30445607f6fc8720f0a0a2c7442284b629cf0d049286860fae23e71c4d9" +"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f" "checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922" "checksum flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "3eeb481e957304178d2e782f2da1257f1434dfecbae883bafb61ada2a9fea3bb" -"checksum fs2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "bcd414e5a1a979b931bb92f41b7a54106d3f6d2e6c253e9ce943b7cd468251ef" -"checksum gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)" = "91ecd03771effb0c968fd6950b37e89476a578aaf1c70297d8e92b6516ec3312" +"checksum fs2 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "640001e1bd865c7c32806292822445af576a6866175b5225aa2087ca5e3de551" +"checksum gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)" = "553f11439bdefe755bf366b264820f1da70f3aaf3924e594b886beb9c831bcf5" "checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518" -"checksum git2 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "33a96eeef227403006cdb59ea6e05baad8cddde6b79abed753d96ccee136bad2" -"checksum git2-curl 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d5f766d804e3cf2b90e16ab77c3ddedcb1ca5d2456cadb7b3f907345f8c3498" +"checksum git2 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"ae93eae026f17b013912629d243444e52ee5d6b1339e71d5212099d1d1b73fc2" +"checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" "checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4" "checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f" +"checksum lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6abe0ee2e758cd6bc8a2cd56726359007748fbf4128da998b65d0b70f881e19b" "checksum libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "044d1360593a78f5c8e5e710beccdc24ab71d1f01bc19a29bcacdba22e8475d8" -"checksum libgit2-sys 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3293dc95169a6351c5a03eca4bf5549f3a9a06336a000315876ff1165a5fba10" -"checksum libressl-pnacl-sys 2.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "cbc058951ab6a3ef35ca16462d7642c4867e6403520811f28537a4e2f2db3e71" -"checksum libssh2-sys 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "1debd7e56d19655eb786f827675dc55f6d530de6d7b81e76d13d1afc635d6c07" -"checksum libz-sys 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "40f2df7730b5d29426c3e44ce4d088d8c5def6471c2c93ba98585b89fb201ce6" +"checksum libgit2-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7ba5bccd2adaf5f251b478000c27532e050be86baa3ebf8c76bb6a7f3c82ef35" +"checksum libssh2-sys 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "538844618f14e5e919332beaf718cf22b63b18cb9b37370560cd1bc55b2734f8" +"checksum libz-sys 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "283c2d162f78c5090522e13fc809820c33181570ae40de1bea84f3864c8759f9" "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054" -"checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e" +"checksum matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efd7622e3022e1a6eaa602c4cea8912254e5582c9c692e9167714182244801b1" "checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" "checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54" "checksum miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d5bfc6782530ac8ace97af10a540054a37126b63b0702ddaaa243b73b5745b9a" @@ -692,29 +665,28 @@ dependencies = [ "checksum num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "287a1c9969a847055e1122ec0ea7a5c5d6f72aad97934e131c83d5c08ab4e45c" "checksum num-rational 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = 
"54ff603b8334a72fbb27fe66948aac0abaaa40231b3cecd189e76162f6f38aaf" "checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c" -"checksum num_cpus 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a859041cbf7a70ea1ece4b87d1a2c6ef364dcb68749c88db1f97304b9ec09d5f" -"checksum openssl 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "c4117b6244aac42ed0150a6019b4d953d28247c5dd6ae6f46ae469b5f2318733" -"checksum openssl-sys 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ac5e9d911dd4c3202bbf4139b73bc7a1231f7d0a39432c6f893745f0e04120" -"checksum openssl-sys-extras 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "11c5e1dba7d3d03d80f045bf0d60111dc69213b67651e7c889527a3badabb9fa" +"checksum num_cpus 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8890e6084723d57d0df8d2720b0d60c6ee67d6c93e7169630e4371e88765dcad" +"checksum openssl 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1eb2a714828f5528e4a24a07c296539216f412364844d61fe1161f94558455d4" +"checksum openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "756d49c8424483a3df3b5d735112b4da22109ced9a8294f1f5cdf80fb3810919" +"checksum openssl-sys 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "95e9fb08acc32509fac299d6e5f4932e1e055bb70d764282c3ed8beaa87ab0e9" "checksum pkg-config 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8cee804ecc7eaf201a4a207241472cc870e825206f6c031e3ee2a72fa425f2fa" -"checksum pnacl-build-helper 1.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "61c9231d31aea845007443d62fcbb58bb6949ab9c18081ee1e09920e0cf1118b" "checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478" "checksum rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2791d88c6defac799c3f20d74f094ca33b9332612d9aef9078519c82e4fe04a5" -"checksum regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)" = "64b03446c466d35b42f2a8b203c8e03ed8b91c0f17b56e1f84f7210a257aa665" -"checksum regex-syntax 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279401017ae31cf4e15344aa3f085d0e2e5c1e70067289ef906906fdbe92c8fd" -"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b" +"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f" +"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957" +"checksum rustc-serialize 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)" = "bff9fc1c79f2dec76b253273d07682e94a978bd8f132ded071188122b2af9818" "checksum semver 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ae2ff60ecdb19c255841c066cbfa5f8c2a4ada1eb3ae47c77ab6667128da71f5" "checksum semver-parser 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e88e43a5a74dd2a11707f9c21dfd4a423c66bd871df813227bb0a3e78f3a1ae9" -"checksum strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e4d73a2c36a4d095ed1a6df5cbeac159863173447f7a82b3f4757426844ab825" +"checksum strsim 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"50c069df92e4b01425a8bf3576d5d417943a6a7272fbabaf5bd80b1aaa76442e" "checksum tar 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "12e1b959f637c2e4c69dbdbf4d7dc609edbaada9b8c35d0c2fc9802d02383b65" "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6" "checksum term 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3deff8a2b3b6607d6d7cc32ac25c0b33709453ca9cceac006caac51e963cf94a" "checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03" -"checksum thread_local 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "55dd963dbaeadc08aa7266bf7f91c3154a7805e32bb94b820b769d2ef3b4744d" -"checksum toml 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a442dfc13508e603c3f763274361db7f79d7469a0e95c411cde53662ab30fc72" +"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5" +"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4" "checksum unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f7ceb96afdfeedee42bade65a0d585a6a0106f681b6749c8ff4daa8df30b3f" "checksum unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "26643a2f83bac55f1976fb716c10234485f9202dcd65cfbdf9da49867b271172" -"checksum url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afe9ec54bc4db14bc8744b7fed060d785ac756791450959b2248443319d5b119" +"checksum url 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "48ccf7bd87a81b769cf84ad556e034541fb90e1cd6d4bc375c822ed9500cd9d7" "checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" "checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" diff --git a/Cargo.toml b/Cargo.toml index 99b15db7d2a..fa10f385736 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cargo" -version = "0.14.0" +version = "0.15.0" authors = ["Yehuda Katz ", "Carl Lerche ", "Alex Crichton "] @@ -20,18 +20,18 @@ path = "src/cargo/lib.rs" advapi32-sys = "0.2" crates-io = { path = "src/crates-io", version = "0.4" } crossbeam = "0.2" -curl = "0.3" +curl = "0.4" docopt = "0.6" env_logger = "0.3" filetime = "0.1" flate2 = "0.2" -fs2 = "0.2" -git2 = "0.4" -git2-curl = "0.5" +fs2 = "0.3" +git2 = "0.6" +git2-curl = "0.7" glob = "0.2" kernel32-sys = "0.2" libc = "0.2" -libgit2-sys = "0.4" +libgit2-sys = "0.6" log = "0.3" miow = "0.1" num_cpus = "1.0" @@ -47,7 +47,7 @@ url = "1.1" winapi = "0.2" [target.'cfg(unix)'.dependencies] -openssl = "0.7" +openssl = "0.9" [dev-dependencies] hamcrest = "0.1" diff --git a/Makefile.in b/Makefile.in index 08dc61626ee..2af0c33e3b4 100644 --- a/Makefile.in +++ b/Makefile.in @@ -1,4 +1,4 @@ -CFG_RELEASE_NUM=0.14.0 +CFG_RELEASE_NUM=0.15.0 CFG_RELEASE_LABEL= OPENSSL_VERS=1.0.2j @@ -6,11 +6,6 @@ OPENSSL_SHA256=e7aff292be21c259c6af26469c7a9b3ba26e9abaaffd325e3dccc9785256c431 include config.mk -ifneq 
($(CFG_LOCAL_RUST_ROOT),) -export LD_LIBRARY_PATH := $(CFG_LOCAL_RUST_ROOT)/lib:$(LD_LIBRARY_PATH) -export DYLD_LIBRARY_PATH := $(CFG_LOCAL_RUST_ROOT)/lib:$(DYLD_LIBRARY_PATH) -endif - export PATH := $(dir $(CFG_RUSTC)):$(PATH) ifdef CFG_ENABLE_NIGHTLY @@ -84,44 +79,45 @@ $(foreach target,$(CFG_TARGET),$(eval $(call DIST_TARGET,$(target)))) ifdef CFG_LOCAL_CARGO CARGO := $(CFG_LOCAL_CARGO) else -CARGO := $(TARGET_ROOT)/snapshot/bin/cargo$(X) +CARGO := $(CFG_CARGO) endif all: $(foreach target,$(CFG_TARGET),cargo-$(target)) define CARGO_TARGET -cargo-$(1): $$(CARGO) target/openssl/$(1).stamp +cargo-$(1): target/openssl/$(1).stamp $$(CFG_RUSTC) -V $$(CARGO) --version $$(CARGO) build --target $(1) \ --manifest-path $(S)Cargo.toml \ $$(OPT_FLAG) $$(CARGOFLAGS) $$(VERBOSE_FLAG) $$(ARGS) -test-unit-$(1): $$(CARGO) - @mkdir -p target/$(1)/cit - $$(CARGO) test --target $(1) $$(CARGOFLAGS) $$(VERBOSE_FLAG) $$(only) +test-unit-$(1): target/openssl/$(1).stamp cargo-$(1) + @mkdir -p $$(CFG_BUILD_DIR)/target/$(1)/cit + $$(CARGO) test --target $(1) \ + --manifest-path $(S)Cargo.toml \ + $$(OPT_FLAG) $$(CARGOFLAGS) $$(VERBOSE_FLAG) $$(only) endef $(foreach target,$(CFG_TARGET),$(eval $(call CARGO_TARGET,$(target)))) -$(TARGET_ROOT)/snapshot/bin/cargo$(X): $(S)src/snapshots.txt - $(CFG_PYTHON) $(S)src/etc/dl-snapshot.py $(CFG_BUILD) - touch $@ - - # === Tests test: style no-exes $(foreach target,$(CFG_TARGET),test-unit-$(target)) style: - sh tests/check-style.sh + (cd $(S) && sh tests/check-style.sh) +ifeq ($(CFG_GIT),) +no-exes: +else no-exes: - find $$(git ls-files) -type f \ + (cd $(S) && find $$($(CFG_GIT) ls-files) -type f \ \( -perm -u+x -or -perm -g+x -or -perm -o+x \) \ -not -name configure -not -name '*.sh' -not -name '*.rs' \ -not -name '*.py' -not -wholename "*/rust-installer/*" | \ grep '.*' \ - && exit 1 || exit 0 + && exit 1 || exit 0) +endif # === Misc @@ -143,9 +139,9 @@ DOC_OPTS := --markdown-no-toc \ --markdown-css stylesheets/normalize.css \ --markdown-css stylesheets/all.css \ --markdown-css stylesheets/prism.css \ - --html-in-header src/doc/html-headers.html \ - --html-before-content src/doc/header.html \ - --html-after-content src/doc/footer.html + --html-in-header $(S)src/doc/html-headers.html \ + --html-before-content $(S)src/doc/header.html \ + --html-after-content $(S)src/doc/footer.html ASSETS := CNAME images/noise.png images/forkme.png images/Cargo-Logo-Small.png \ stylesheets/all.css stylesheets/normalize.css javascripts/prism.js \ javascripts/all.js stylesheets/prism.css images/circle-with-i.png \ @@ -156,14 +152,19 @@ doc: $(foreach doc,$(DOCS),target/doc/$(doc).html) \ $(foreach asset,$(ASSETS),target/doc/$(asset)) \ target/doc/cargo/index.html -target/doc/cargo/index.html: - $(CARGO) doc --no-deps +target/doc/cargo/index.html: target/openssl/$(CFG_BUILD).stamp cargo-$(CFG_BUILD) + $(CARGO) doc --no-deps --target $(CFG_BUILD) \ + --manifest-path $(S)Cargo.toml $(OPT_FLAG) -$(DOC_DIR)/%.html: src/doc/%.md src/doc/html-headers.html src/doc/header.html src/doc/footer.html +$(DOC_DIR)/%.html: \ + $(S)src/doc/%.md \ + $(S)src/doc/html-headers.html \ + $(S)src/doc/header.html \ + $(S)src/doc/footer.html @mkdir -p $(@D) $(CFG_RUSTDOC) $< -o $(@D) $(DOC_OPTS) -$(DOC_DIR)/%: src/doc/% +$(DOC_DIR)/%: $(S)src/doc/% @mkdir -p $(@D) cp $< $@ @@ -173,6 +174,7 @@ OPENSSL_OS_arm-unknown-linux-gnueabihf := linux-armv4 OPENSSL_OS_armv7-unknown-linux-gnueabihf := linux-armv4 OPENSSL_OS_i686-unknown-freebsd := BSD-x86-elf OPENSSL_OS_i686-unknown-linux-gnu := linux-elf 
+OPENSSL_OS_i686-unknown-linux-musl := linux-elf OPENSSL_OS_mips-unknown-linux-gnu := linux-mips32 OPENSSL_OS_mipsel-unknown-linux-gnu := linux-mips32 OPENSSL_OS_mips64-unknown-linux-gnuabi64 := linux64-mips64 @@ -192,6 +194,7 @@ OPENSSL_AR_arm-unknown-linux-gnueabihf := arm-linux-gnueabihf-ar OPENSSL_AR_armv7-unknown-linux-gnueabihf := armv7-linux-gnueabihf-ar OPENSSL_AR_i686-unknown-freebsd := i686-unknown-freebsd10-ar OPENSSL_AR_i686-unknown-linux-gnu := ar +OPENSSL_AR_i686-unknown-linux-musl := ar OPENSSL_AR_mips-unknown-linux-gnu := mips-linux-gnu-ar OPENSSL_AR_mips64-unknown-linux-gnuabi64 := mips64-linux-gnuabi64-ar OPENSSL_AR_mips64el-unknown-linux-gnuabi64 := mips64el-linux-gnuabi64-ar @@ -210,6 +213,7 @@ OPENSSL_CC_arm-unknown-linux-gnueabihf := arm-linux-gnueabihf-gcc OPENSSL_CC_armv7-unknown-linux-gnueabihf := armv7-linux-gnueabihf-gcc OPENSSL_CC_i686-unknown-freebsd := i686-unknown-freebsd10-gcc OPENSSL_CC_i686-unknown-linux-gnu := gcc +OPENSSL_CC_i686-unknown-linux-musl := musl-gcc OPENSSL_CC_mips-unknown-linux-gnu := mips-linux-gnu-gcc OPENSSL_CC_mips64-unknown-linux-gnuabi64 := mips64-linux-gnuabi64-gcc OPENSSL_CC_mips64el-unknown-linux-gnuabi64 := mips64el-linux-gnuabi64-gcc @@ -225,6 +229,7 @@ OPENSSL_CC_x86_64-unknown-netbsd := x86_64-unknown-netbsd-gcc SETARCH_i686-unknown-linux-gnu := setarch i386 OPENSSL_CFLAGS_i686-unknown-linux-gnu := -m32 +OPENSSL_CFLAGS_i686-unknown-linux-musl := -m32 define BUILD_OPENSSL ifdef OPENSSL_OS_$(1) @@ -246,13 +251,9 @@ target/openssl/$(1).stamp: target/openssl/openssl-$$(OPENSSL_VERS).tar.gz \ # variables read by various build scripts to find openssl cargo-$(1): export OPENSSL_STATIC := 1 -cargo-$(1): export OPENSSL_ROOT_DIR := $$(OPENSSL_INSTALL_$(1)) -cargo-$(1): export OPENSSL_LIB_DIR := $$(OPENSSL_INSTALL_$(1))/lib -cargo-$(1): export OPENSSL_INCLUDE_DIR := $$(OPENSSL_INSTALL_$(1))/include +cargo-$(1): export OPENSSL_DIR := $$(OPENSSL_INSTALL_$(1)) test-unit-$(1): export OPENSSL_STATIC := 1 -test-unit-$(1): export OPENSSL_ROOT_DIR := $$(OPENSSL_INSTALL_$(1)) -test-unit-$(1): export OPENSSL_LIB_DIR := $$(OPENSSL_INSTALL_$(1))/lib -test-unit-$(1): export OPENSSL_INCLUDE_DIR := $$(OPENSSL_INSTALL_$(1))/include +test-unit-$(1): export OPENSSL_DIR := $$(OPENSSL_INSTALL_$(1)) # build libz statically into the cargo we're producing cargo-$(1): export LIBZ_SYS_STATIC := 1 diff --git a/appveyor.yml b/appveyor.yml index 88660ab3b3c..7187fc0d5eb 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,37 +1,76 @@ environment: - CFG_DISABLE_CROSS_TESTS: 1 matrix: - - TARGET: i686-pc-windows-msvc - MSVC: 1 - BITS: 32 - ARCH: x86 - - TARGET: x86_64-pc-windows-msvc - MSVC: 1 - BITS: 64 - ARCH: amd64 - TARGET: x86_64-pc-windows-gnu ARCH: amd64 BITS: 64 + CFG_DISABLE_CROSS_TESTS: 1 + MAKE_TARGETS: test-unit-x86_64-pc-windows-gnu - TARGET: i686-pc-windows-gnu ARCH: x86 BITS: 32 MINGW_URL: https://s3.amazonaws.com/rust-lang-ci MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z MINGW_DIR: mingw32 + CFG_DISABLE_CROSS_TESTS: 1 + MAKE_TARGETS: test-unit-i686-pc-windows-gnu + - TARGET: i686-pc-windows-msvc + BITS: 32 + ARCH: x86 + MAKE_TARGETS: test-unit-i686-pc-windows-msvc + CFG_DISABLE_CROSS_TESTS: 1 + - TARGET: x86_64-pc-windows-msvc + OTHER_TARGET: i686-pc-windows-msvc + BITS: 64 + ARCH: amd64 + MAKE_TARGETS: test-unit-x86_64-pc-windows-msvc install: - - IF "%MSVC%"=="" set PATH=C:\msys64\mingw%BITS%\bin;C:\msys64\usr\bin;%PATH% + - set PATH=C:\msys64\mingw%BITS%\bin;C:\msys64\usr\bin;%PATH% - if defined MINGW_URL appveyor DownloadFile 
%MINGW_URL%/%MINGW_ARCHIVE% - if defined MINGW_URL 7z x -y %MINGW_ARCHIVE% > nul - if defined MINGW_URL set PATH=%CD%\%MINGW_DIR%\bin;C:\msys64\usr\bin;%PATH% - - python src/etc/install-deps.py - - python src/etc/dl-snapshot.py %TARGET% - - SET PATH=%PATH%;%cd%/rustc/bin - - SET PATH=%PATH%;%cd%/target/snapshot/bin + - curl -sSf -o rustup-init.exe https://win.rustup.rs/ + - rustup-init.exe -y --default-host x86_64-pc-windows-msvc + - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin + - if NOT "%TARGET%" == "x86_64-pc-windows-msvc" rustup target add %TARGET% + - if defined OTHER_TARGET rustup target add %OTHER_TARGET% - rustc -V - cargo -V + - git submodule update --init build: false test_script: - - cargo test + - sh src/ci/run.sh %TARGET% + +cache: + - target + - C:\Users\appveyor\.cargo\registry + +after_test: + - mkdir %APPVEYOR_REPO_COMMIT% + - copy target\%TARGET%\release\dist\cargo-nightly-%TARGET%.tar.gz + %APPVEYOR_REPO_COMMIT% + +branches: + only: + - master + - auto-cargo + +artifacts: + - path: $(APPVEYOR_REPO_COMMIT)\cargo-nightly-$(TARGET).tar.gz + name: cargo + +deploy: + - provider: S3 + skip_cleanup: true + access_key_id: AKIAIWZDM2B2IJOWBGTA + secret_access_key: + secure: hyH54di5NyNdV+jjntM1dRN/NeUgDidwZmwcg4/UKpdJqGf1AAwYb2ulXYK67CXA + bucket: rust-lang-cargo-dev + set_public: true + region: us-west-1 + artifact: cargo + folder: cargo-master + on: + branch: auto-cargo diff --git a/configure b/configure index 66400d80cef..1ead582545e 100755 --- a/configure +++ b/configure @@ -270,9 +270,7 @@ need_cmd date need_cmd tr need_cmd sed need_cmd cmake -if [ "${OS}" != "Windows_NT" ]; then - need_cmd curl -fi +need_cmd make CFG_SRC_DIR="$(cd $(dirname $0) && pwd)/" CFG_BUILD_DIR="$(pwd)/" @@ -309,20 +307,20 @@ opt cross-tests 1 "run cross-compilation tests" valopt prefix "/usr/local" "set installation prefix" valopt local-rust-root "" "set prefix for local rust binary" +if [ ! -z "${CFG_LOCAL_RUST_ROOT}" ]; then + export LD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$LD_LIBRARY_PATH" + export DYLD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$DYLD_LIBRARY_PATH" + export PATH="${CFG_LOCAL_RUST_ROOT}/bin:$PATH" +fi + +valopt cargo "cargo" "cargo to bootstrap from" +valopt rustc "rustc" "rustc to compile with" +valopt rustdoc "rustdoc" "rustdoc to document with" + if [ $HELP -eq 0 ]; then - if [ ! -z "${CFG_LOCAL_RUST_ROOT}" ]; then - export LD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$LD_LIBRARY_PATH" - export DYLD_LIBRARY_PATH="${CFG_LOCAL_RUST_ROOT}/lib:$DYLD_LIBRARY_PATH" - LRV=`${CFG_LOCAL_RUST_ROOT}/bin/rustc --version` - if [ $? -eq 0 ]; then - step_msg "using rustc at: ${CFG_LOCAL_RUST_ROOT} with version: $LRV" - else - err "failed to run rustc at: ${CFG_LOCAL_RUST_ROOT}" - fi - CFG_RUSTC="${CFG_LOCAL_RUST_ROOT}/bin/rustc" - else - probe_need CFG_RUSTC rustc - fi + probe_need CFG_CARGO $CFG_CARGO + probe_need CFG_RUSTC $CFG_RUSTC + probe_need CFG_RUSTDOC $CFG_RUSTDOC DEFAULT_BUILD=$("${CFG_RUSTC}" -vV | grep 'host: ' | sed 's/host: //') fi @@ -337,7 +335,6 @@ valopt infodir "${CFG_PREFIX}/share/info" "install additional info" valopt docdir "${CFG_PREFIX}/share/doc/cargo" "install extra docs" valopt mandir "${CFG_PREFIX}/share/man" "install man pages in PATH" valopt libdir "${CFG_PREFIX}/lib" "install libraries" -valopt local-cargo "" "local cargo to bootstrap from" if [ $HELP -eq 1 ] then @@ -354,15 +351,8 @@ fi step_msg "looking for build programs" -probe_need CFG_CURLORWGET curl wget -probe_need CFG_PYTHON python2.7 python2 python probe_need CFG_CC cc gcc clang - -if [ ! 
-z "${CFG_LOCAL_RUST_ROOT}" ]; then - CFG_RUSTDOC="${CFG_LOCAL_RUST_ROOT}/bin/rustdoc" -else - probe_need CFG_RUSTDOC rustdoc -fi +probe GIT git # a little post-processing of various config values CFG_PREFIX=${CFG_PREFIX%/} @@ -396,6 +386,7 @@ if [ "$CFG_SRC_DIR" != "$CFG_BUILD_DIR" ]; then fi if [ ! -z "$CFG_ENABLE_NIGHTLY" ]; then + need_cmd curl if [ ! -f .cargo/config ]; then mkdir -p .cargo cat > .cargo/config <<-EOF @@ -449,6 +440,8 @@ putvar CFG_MANDIR putvar CFG_LIBDIR putvar CFG_RUSTC putvar CFG_RUSTDOC +putvar CFG_CARGO +putvar CFG_GIT msg copy_if_changed ${CFG_SRC_DIR}Makefile.in ./Makefile diff --git a/src/bin/bench.rs b/src/bin/bench.rs index d5bc21dd46e..774b3003cf7 100644 --- a/src/bin/bench.rs +++ b/src/bin/bench.rs @@ -71,12 +71,12 @@ Compilation can be customized with the `bench` profile in the manifest. "; pub fn execute(options: Options, config: &Config) -> CliResult> { - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let ops = ops::TestOptions { no_run: options.flag_no_run, no_fail_fast: false, @@ -102,8 +102,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }, }; - let ws = try!(Workspace::new(&root, config)); - let err = try!(ops::run_benches(&ws, &ops, &options.arg_args)); + let ws = Workspace::new(&root, config)?; + let err = ops::run_benches(&ws, &ops, &options.arg_args)?; match err { None => Ok(None), Some(err) => { diff --git a/src/bin/build.rs b/src/bin/build.rs index eb6673c2f9a..7e6688b410f 100644 --- a/src/bin/build.rs +++ b/src/bin/build.rs @@ -69,13 +69,13 @@ the --release flag will use the `release` profile instead. pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-build; args={:?}", env::args().collect::>()); - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; let opts = CompileOptions { config: config, @@ -97,7 +97,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target_rustc_args: None, }; - let ws = try!(Workspace::new(&root, config)); - try!(ops::compile(&ws, &opts)); + let ws = Workspace::new(&root, config)?; + ops::compile(&ws, &opts)?; Ok(None) } diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs index fdcea48c89f..05c88019b55 100644 --- a/src/bin/cargo.rs +++ b/src/bin/cargo.rs @@ -116,11 +116,11 @@ each_subcommand!(declare_mod); on this top-level information. 
*/ fn execute(flags: Flags, config: &Config) -> CliResult> { - try!(config.configure(flags.flag_verbose, - flags.flag_quiet, - &flags.flag_color, - flags.flag_frozen, - flags.flag_locked)); + config.configure(flags.flag_verbose, + flags.flag_quiet, + &flags.flag_color, + flags.flag_frozen, + flags.flag_locked)?; init_git_transports(config); let _token = cargo::util::job::setup(); @@ -139,8 +139,8 @@ fn execute(flags: Flags, config: &Config) -> CliResult> { } if let Some(ref code) = flags.flag_explain { - let mut procss = try!(config.rustc()).process(); - try!(procss.arg("--explain").arg(code).exec().map_err(human)); + let mut procss = config.rustc()?.process(); + procss.arg("--explain").arg(code).exec().map_err(human)?; return Ok(None) } @@ -189,7 +189,7 @@ fn execute(flags: Flags, config: &Config) -> CliResult> { return Ok(None) } - let alias_list = try!(aliased_command(&config, &args[1])); + let alias_list = aliased_command(&config, &args[1])?; let args = match alias_list { Some(alias_command) => { let chain = args.iter().take(1) @@ -205,7 +205,7 @@ fn execute(flags: Flags, config: &Config) -> CliResult> { } None => args, }; - try!(execute_subcommand(config, &args[1], &args)); + execute_subcommand(config, &args[1], &args)?; Ok(None) } @@ -239,7 +239,7 @@ fn aliased_command(config: &Config, command: &String) -> CargoResult { - let value = try!(config.get_list(&alias_name)); + let value = config.get_list(&alias_name)?; if let Some(record) = value { let alias_commands: Vec = record.val.iter() .map(|s| s.0.to_string()).collect(); diff --git a/src/bin/clean.rs b/src/bin/clean.rs index c259e83c6f3..35146c687af 100644 --- a/src/bin/clean.rs +++ b/src/bin/clean.rs @@ -44,20 +44,20 @@ and its format, see the `cargo help pkgid` command. pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-clean; args={:?}", env::args().collect::>()); - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; let opts = ops::CleanOptions { config: config, spec: &options.flag_package, target: options.flag_target.as_ref().map(|s| &s[..]), release: options.flag_release, }; - let ws = try!(Workspace::new(&root, config)); - try!(ops::clean(&ws, &opts)); + let ws = Workspace::new(&root, config)?; + ops::clean(&ws, &opts)?; Ok(None) } diff --git a/src/bin/doc.rs b/src/bin/doc.rs index c250dde227c..f8b434247a9 100644 --- a/src/bin/doc.rs +++ b/src/bin/doc.rs @@ -62,13 +62,13 @@ the `cargo help pkgid` command. 
"; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; let empty = Vec::new(); let doc_opts = ops::DocOptions { @@ -96,7 +96,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }, }; - let ws = try!(Workspace::new(&root, config)); - try!(ops::doc(&ws, &doc_opts)); + let ws = Workspace::new(&root, config)?; + ops::doc(&ws, &doc_opts)?; Ok(None) } diff --git a/src/bin/fetch.rs b/src/bin/fetch.rs index 1a970b71f05..877d53869a6 100644 --- a/src/bin/fetch.rs +++ b/src/bin/fetch.rs @@ -39,14 +39,14 @@ all updated. "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); - let ws = try!(Workspace::new(&root, config)); - try!(ops::fetch(&ws)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + let ws = Workspace::new(&root, config)?; + ops::fetch(&ws)?; Ok(None) } diff --git a/src/bin/generate_lockfile.rs b/src/bin/generate_lockfile.rs index 36057c1bb6e..40717435f04 100644 --- a/src/bin/generate_lockfile.rs +++ b/src/bin/generate_lockfile.rs @@ -33,14 +33,14 @@ Options: pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-generate-lockfile; args={:?}", env::args().collect::>()); - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; - let ws = try!(Workspace::new(&root, config)); - try!(ops::generate_lockfile(&ws)); + let ws = Workspace::new(&root, config)?; + ops::generate_lockfile(&ws)?; Ok(None) } diff --git a/src/bin/git_checkout.rs b/src/bin/git_checkout.rs index f7762483ea6..268776c7b74 100644 --- a/src/bin/git_checkout.rs +++ b/src/bin/git_checkout.rs @@ -30,21 +30,21 @@ Options: "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let Options { flag_url: url, flag_reference: reference, .. 
} = options; - let url = try!(url.to_url()); + let url = url.to_url()?; let reference = GitReference::Branch(reference.clone()); let source_id = SourceId::for_git(&url, reference); let mut source = GitSource::new(&source_id, config); - try!(source.update()); + source.update()?; Ok(None) } diff --git a/src/bin/init.rs b/src/bin/init.rs index 237d7663fb4..1b69a2204de 100644 --- a/src/bin/init.rs +++ b/src/bin/init.rs @@ -41,11 +41,11 @@ Options: pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-init; args={:?}", env::args().collect::>()); - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let Options { flag_bin, flag_lib, arg_path, flag_name, flag_vcs, .. } = options; @@ -57,11 +57,11 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { flag_name.as_ref().map(|s| s.as_ref())); let opts_lib = opts.lib; - try!(ops::init(opts, config)); + ops::init(opts, config)?; - try!(config.shell().status("Created", format!("{} project", - if opts_lib { "library" } - else {"binary (application)"}))); + config.shell().status("Created", format!("{} project", + if opts_lib { "library" } + else {"binary (application)"}))?; Ok(None) } diff --git a/src/bin/install.rs b/src/bin/install.rs index 319b9bdf6f4..c27db56ffaf 100644 --- a/src/bin/install.rs +++ b/src/bin/install.rs @@ -95,11 +95,11 @@ The `--list` option will list all installed packages (and their versions). "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let compile_opts = ops::CompileOptions { config: config, @@ -119,7 +119,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }; let source = if let Some(url) = options.flag_git { - let url = try!(url.to_url()); + let url = url.to_url()?; let gitref = if let Some(branch) = options.flag_branch { GitReference::Branch(branch) } else if let Some(tag) = options.flag_tag { @@ -131,11 +131,11 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }; SourceId::for_git(&url, gitref) } else if let Some(path) = options.flag_path { - try!(SourceId::for_path(&config.cwd().join(path))) + SourceId::for_path(&config.cwd().join(path))? } else if options.arg_crate == None { - try!(SourceId::for_path(&config.cwd())) + SourceId::for_path(&config.cwd())? } else { - try!(SourceId::crates_io(config)) + SourceId::crates_io(config)? 
}; let krate = options.arg_crate.as_ref().map(|s| &s[..]); @@ -143,9 +143,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { let root = options.flag_root.as_ref().map(|s| &s[..]); if options.flag_list { - try!(ops::install_list(root, config)); + ops::install_list(root, config)?; } else { - try!(ops::install(root, krate, &source, vers, &compile_opts, options.flag_force)); + ops::install(root, krate, &source, vers, &compile_opts, options.flag_force)?; } Ok(None) } diff --git a/src/bin/locate_project.rs b/src/bin/locate_project.rs index f162788fcbd..c9a352453f2 100644 --- a/src/bin/locate_project.rs +++ b/src/bin/locate_project.rs @@ -24,13 +24,13 @@ pub struct ProjectLocation { pub fn execute(flags: LocateProjectFlags, config: &Config) -> CliResult> { - let root = try!(find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd())); + let root = find_root_manifest_for_wd(flags.flag_manifest_path, config.cwd())?; - let string = try!(root.to_str() + let string = root.to_str() .chain_error(|| human("Your project path contains \ characters not representable in \ Unicode")) - .map_err(|e| CliError::new(e, 1))); + .map_err(|e| CliError::new(e, 1))?; Ok(Some(ProjectLocation { root: string.to_string() })) } diff --git a/src/bin/login.rs b/src/bin/login.rs index 53de98af66e..6bb3618da1f 100644 --- a/src/bin/login.rs +++ b/src/bin/login.rs @@ -35,31 +35,31 @@ Options: "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let token = match options.arg_token.clone() { Some(token) => token, None => { - let src = try!(SourceId::crates_io(config)); + let src = SourceId::crates_io(config)?; let mut src = RegistrySource::remote(&src, config); - try!(src.update()); - let config = try!(src.config()).unwrap(); + src.update()?; + let config = src.config()?.unwrap(); let host = options.flag_host.clone().unwrap_or(config.api); println!("please visit {}me and paste the API Token below", host); let mut line = String::new(); let input = io::stdin(); - try!(input.lock().read_line(&mut line).chain_error(|| { + input.lock().read_line(&mut line).chain_error(|| { human("failed to read stdin") - })); + })?; line } }; let token = token.trim().to_string(); - try!(ops::registry_login(config, token)); + ops::registry_login(config, token)?; Ok(None) } diff --git a/src/bin/metadata.rs b/src/bin/metadata.rs index 4553711045b..7971d0707b7 100644 --- a/src/bin/metadata.rs +++ b/src/bin/metadata.rs @@ -43,12 +43,12 @@ Options: "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); - let manifest = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; + let manifest = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; let options = OutputMetadataOptions { features: options.flag_features, @@ -58,7 +58,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult CliResult> { debug!("executing; cmd=cargo-new; args={:?}", env::args().collect::>()); - try!(config.configure(options.flag_verbose, - 
options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let Options { flag_bin, flag_lib, arg_path, flag_name, flag_vcs, .. } = options; @@ -56,12 +56,12 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { flag_name.as_ref().map(|s| s.as_ref())); let opts_lib = opts.lib; - try!(ops::new(opts, config)); + ops::new(opts, config)?; - try!(config.shell().status("Created", format!("{} `{}` project", - if opts_lib { "library" } - else {"binary (application)"}, - arg_path))); + config.shell().status("Created", format!("{} `{}` project", + if opts_lib { "library" } + else {"binary (application)"}, + arg_path))?; Ok(None) } diff --git a/src/bin/owner.rs b/src/bin/owner.rs index 9b666adfe87..4c6976aa7d1 100644 --- a/src/bin/owner.rs +++ b/src/bin/owner.rs @@ -45,11 +45,11 @@ and troubleshooting. "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let opts = ops::OwnersOptions { krate: options.arg_crate, token: options.flag_token, @@ -58,7 +58,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { to_remove: options.flag_remove, list: options.flag_list, }; - try!(ops::modify_owners(config, &opts)); + ops::modify_owners(config, &opts)?; Ok(None) } diff --git a/src/bin/package.rs b/src/bin/package.rs index 40eb7bac778..f3f95a99c64 100644 --- a/src/bin/package.rs +++ b/src/bin/package.rs @@ -40,20 +40,20 @@ Options: "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); - let ws = try!(Workspace::new(&root, config)); - try!(ops::package(&ws, &ops::PackageOpts { + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; + let ws = Workspace::new(&root, config)?; + ops::package(&ws, &ops::PackageOpts { config: config, verify: !options.flag_no_verify, list: options.flag_list, check_metadata: !options.flag_no_metadata, allow_dirty: options.flag_allow_dirty, jobs: options.flag_jobs, - })); + })?; Ok(None) } diff --git a/src/bin/pkgid.rs b/src/bin/pkgid.rs index f9d90523cea..e18a505bf58 100644 --- a/src/bin/pkgid.rs +++ b/src/bin/pkgid.rs @@ -54,13 +54,13 @@ Example Package IDs pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd())); - let ws = try!(Workspace::new(&root, config)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd())?; + let ws = Workspace::new(&root, config)?; let spec = if options.arg_spec.is_some() { 
options.arg_spec @@ -70,7 +70,7 @@ pub fn execute(options: Options, None }; let spec = spec.as_ref().map(|s| &s[..]); - let spec = try!(ops::pkgid(&ws, spec)); + let spec = ops::pkgid(&ws, spec)?; println!("{}", spec); Ok(None) } diff --git a/src/bin/publish.rs b/src/bin/publish.rs index 7c277046006..56db84d1766 100644 --- a/src/bin/publish.rs +++ b/src/bin/publish.rs @@ -43,11 +43,11 @@ Options: "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let Options { flag_token: token, flag_host: host, @@ -59,9 +59,9 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { .. } = options; - let root = try!(find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd())); - let ws = try!(Workspace::new(&root, config)); - try!(ops::publish(&ws, &ops::PublishOpts { + let root = find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd())?; + let ws = Workspace::new(&root, config)?; + ops::publish(&ws, &ops::PublishOpts { config: config, token: token, index: host, @@ -69,6 +69,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { allow_dirty: allow_dirty, jobs: jobs, dry_run: dry_run, - })); + })?; Ok(None) } diff --git a/src/bin/read_manifest.rs b/src/bin/read_manifest.rs index bd44b0ad581..dfff8a78e1b 100644 --- a/src/bin/read_manifest.rs +++ b/src/bin/read_manifest.rs @@ -28,10 +28,10 @@ Options: pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-read-manifest; args={:?}", env::args().collect::>()); - try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); + config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; - let pkg = try!(Package::for_path(&root, config)); + let pkg = Package::for_path(&root, config)?; Ok(Some(pkg)) } diff --git a/src/bin/run.rs b/src/bin/run.rs index f9ce057319c..1e7089a7572 100644 --- a/src/bin/run.rs +++ b/src/bin/run.rs @@ -58,13 +58,13 @@ the ones before go to Cargo. "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; let (mut examples, mut bins) = (Vec::new(), Vec::new()); if let Some(s) = options.flag_bin { @@ -97,8 +97,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target_rustc_args: None, }; - let ws = try!(Workspace::new(&root, config)); - match try!(ops::run(&ws, &compile_opts, &options.arg_args)) { + let ws = Workspace::new(&root, config)?; + match ops::run(&ws, &compile_opts, &options.arg_args)? 
{ None => Ok(None), Some(err) => { // If we never actually spawned the process then that sounds pretty diff --git a/src/bin/rustc.rs b/src/bin/rustc.rs index 83103198496..a73088bfd64 100644 --- a/src/bin/rustc.rs +++ b/src/bin/rustc.rs @@ -76,14 +76,14 @@ processes spawned by Cargo, use the $RUSTFLAGS environment variable or the pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-rustc; args={:?}", env::args().collect::>()); - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, - config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, + config.cwd())?; let mode = match options.flag_profile.as_ref().map(|t| &t[..]) { Some("dev") | None => CompileMode::Build, Some("test") => CompileMode::Test, @@ -115,8 +115,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]), }; - let ws = try!(Workspace::new(&root, config)); - try!(ops::compile(&ws, &opts)); + let ws = Workspace::new(&root, config)?; + ops::compile(&ws, &opts)?; Ok(None) } diff --git a/src/bin/rustdoc.rs b/src/bin/rustdoc.rs index df3e4886ba1..0d159f47a8e 100644 --- a/src/bin/rustdoc.rs +++ b/src/bin/rustdoc.rs @@ -71,14 +71,14 @@ the `cargo help pkgid` command. "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, - config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, + config.cwd())?; let doc_opts = ops::DocOptions { open_result: options.flag_open, @@ -103,8 +103,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }, }; - let ws = try!(Workspace::new(&root, config)); - try!(ops::doc(&ws, &doc_opts)); + let ws = Workspace::new(&root, config)?; + ops::doc(&ws, &doc_opts)?; Ok(None) } diff --git a/src/bin/search.rs b/src/bin/search.rs index 829039aaa0e..eebe34988fb 100644 --- a/src/bin/search.rs +++ b/src/bin/search.rs @@ -34,11 +34,11 @@ Options: "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let Options { flag_host: host, flag_limit: limit, @@ -46,6 +46,6 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { .. 
} = options; - try!(ops::search(&query.join("+"), config, host, cmp::min(100, limit.unwrap_or(10)) as u8)); + ops::search(&query.join("+"), config, host, cmp::min(100, limit.unwrap_or(10)) as u8)?; Ok(None) } diff --git a/src/bin/test.rs b/src/bin/test.rs index ed487aa4226..c28ff8fc431 100644 --- a/src/bin/test.rs +++ b/src/bin/test.rs @@ -89,13 +89,13 @@ To get the list of all options available for the test binaries use this: "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; let empty = Vec::new(); let (mode, filter); @@ -132,8 +132,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }, }; - let ws = try!(Workspace::new(&root, config)); - let err = try!(ops::run_tests(&ws, &ops, &options.arg_args)); + let ws = Workspace::new(&root, config)?; + let err = ops::run_tests(&ws, &ops, &options.arg_args)?; match err { None => Ok(None), Some(err) => { diff --git a/src/bin/uninstall.rs b/src/bin/uninstall.rs index 001abde41ef..b5c827e7a4f 100644 --- a/src/bin/uninstall.rs +++ b/src/bin/uninstall.rs @@ -38,14 +38,14 @@ only uninstall particular binaries. "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; let root = options.flag_root.as_ref().map(|s| &s[..]); - try!(ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)); + ops::uninstall(root, &options.arg_spec, &options.flag_bin, config)?; Ok(None) } diff --git a/src/bin/update.rs b/src/bin/update.rs index 6d1d7935b9c..6cf1e79864c 100644 --- a/src/bin/update.rs +++ b/src/bin/update.rs @@ -59,12 +59,12 @@ For more information about package id specifications, see `cargo help pkgid`. 
pub fn execute(options: Options, config: &Config) -> CliResult> { debug!("executing; cmd=cargo-update; args={:?}", env::args().collect::>()); - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); - let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; + let root = find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())?; let update_opts = ops::UpdateOptions { aggressive: options.flag_aggressive, @@ -73,7 +73,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { config: config, }; - let ws = try!(Workspace::new(&root, config)); - try!(ops::update_lockfile(&ws, &update_opts)); + let ws = Workspace::new(&root, config)?; + ops::update_lockfile(&ws, &update_opts)?; Ok(None) } diff --git a/src/bin/verify_project.rs b/src/bin/verify_project.rs index 27424f7a681..726e1ab358b 100644 --- a/src/bin/verify_project.rs +++ b/src/bin/verify_project.rs @@ -38,11 +38,11 @@ Options: "; pub fn execute(args: Flags, config: &Config) -> CliResult> { - try!(config.configure(args.flag_verbose, - args.flag_quiet, - &args.flag_color, - args.flag_frozen, - args.flag_locked)); + config.configure(args.flag_verbose, + args.flag_quiet, + &args.flag_color, + args.flag_frozen, + args.flag_locked)?; let mut contents = String::new(); let filename = args.flag_manifest_path.unwrap_or("Cargo.toml".into()); diff --git a/src/bin/yank.rs b/src/bin/yank.rs index 7971efbb37e..760e8cb80b0 100644 --- a/src/bin/yank.rs +++ b/src/bin/yank.rs @@ -43,17 +43,17 @@ crates to be locked to any yanked version. "; pub fn execute(options: Options, config: &Config) -> CliResult> { - try!(config.configure(options.flag_verbose, - options.flag_quiet, - &options.flag_color, - options.flag_frozen, - options.flag_locked)); - try!(ops::yank(config, - options.arg_crate, - options.flag_vers, - options.flag_token, - options.flag_index, - options.flag_undo)); + config.configure(options.flag_verbose, + options.flag_quiet, + &options.flag_color, + options.flag_frozen, + options.flag_locked)?; + ops::yank(config, + options.arg_crate, + options.flag_vers, + options.flag_token, + options.flag_index, + options.flag_undo)?; Ok(None) } diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index b96fed73f1d..23cb71034b4 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -102,7 +102,7 @@ impl DependencyInner { deprecated_extra: Option<(&PackageId, &Config)>) -> CargoResult { let (specified_req, version_req) = match version { - Some(v) => (true, try!(DependencyInner::parse_with_deprecated(v, deprecated_extra))), + Some(v) => (true, DependencyInner::parse_with_deprecated(v, deprecated_extra)?), None => (false, VersionReq::any()) }; @@ -137,7 +137,7 @@ update to a fixed version or contact the upstream maintainer about this warning. 
", req, inside.name(), inside.version(), requirement); - try!(config.shell().warn(&msg)); + config.shell().warn(&msg)?; Ok(requirement) } diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index da14b04fc33..03ed7d88729 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -72,9 +72,9 @@ impl Package { pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult { let path = manifest_path.parent().unwrap(); - let source_id = try!(SourceId::for_path(path)); - let (pkg, _) = try!(ops::read_package(&manifest_path, &source_id, - config)); + let source_id = SourceId::for_path(path)?; + let (pkg, _) = ops::read_package(&manifest_path, &source_id, + config)?; Ok(pkg) } @@ -157,20 +157,20 @@ impl<'cfg> PackageSet<'cfg> { } pub fn get(&self, id: &PackageId) -> CargoResult<&Package> { - let slot = try!(self.packages.iter().find(|p| p.0 == *id).chain_error(|| { + let slot = self.packages.iter().find(|p| p.0 == *id).chain_error(|| { internal(format!("couldn't find `{}` in package set", id)) - })); + })?; let slot = &slot.1; if let Some(pkg) = slot.borrow() { return Ok(pkg) } let mut sources = self.sources.borrow_mut(); - let source = try!(sources.get_mut(id.source_id()).chain_error(|| { + let source = sources.get_mut(id.source_id()).chain_error(|| { internal(format!("couldn't find source for `{}`", id)) - })); - let pkg = try!(source.download(id).chain_error(|| { + })?; + let pkg = source.download(id).chain_error(|| { human("unable to get packages from source") - })); + })?; assert!(slot.fill(pkg).is_ok()); Ok(slot.borrow().unwrap()) } diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index b29b8ca11d5..8f1992a733e 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -36,21 +36,21 @@ impl Encodable for PackageId { impl Decodable for PackageId { fn decode(d: &mut D) -> Result { - let string: String = try!(Decodable::decode(d)); + let string: String = Decodable::decode(d)?; let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap(); - let captures = try!(regex.captures(&string).ok_or_else(|| { + let captures = regex.captures(&string).ok_or_else(|| { d.error("invalid serialized PackageId") - })); + })?; let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); let url = captures.at(3).unwrap(); - let version = try!(semver::Version::parse(version).map_err(|_| { + let version = semver::Version::parse(version).map_err(|_| { d.error("invalid version") - })); - let source_id = try!(SourceId::from_url(url).map_err(|e| { + })?; + let source_id = SourceId::from_url(url).map_err(|e| { d.error(&e.to_string()) - })); + })?; Ok(PackageId { inner: Arc::new(PackageIdInner { @@ -127,7 +127,7 @@ pub struct Metadata { impl PackageId { pub fn new(name: &str, version: T, sid: &SourceId) -> CargoResult { - let v = try!(version.to_semver().map_err(PackageIdError::InvalidVersion)); + let v = version.to_semver().map_err(PackageIdError::InvalidVersion)?; Ok(PackageId { inner: Arc::new(PackageIdInner { name: name.to_string(), @@ -179,10 +179,10 @@ impl Metadata { impl fmt::Display for PackageId { fn fmt(&self, f: &mut Formatter) -> fmt::Result { - try!(write!(f, "{} v{}", self.inner.name, self.inner.version)); + write!(f, "{} v{}", self.inner.name, self.inner.version)?; if !self.inner.source_id.is_default_registry() { - try!(write!(f, " ({})", self.inner.source_id)); + write!(f, " ({})", self.inner.source_id)?; } Ok(()) diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs index 
a07b69e49f7..2af87fb6c01 100644 --- a/src/cargo/core/package_id_spec.rs +++ b/src/cargo/core/package_id_spec.rs @@ -29,7 +29,7 @@ impl PackageIdSpec { let mut parts = spec.splitn(2, ':'); let name = parts.next().unwrap(); let version = match parts.next() { - Some(version) => Some(try!(Version::parse(version).map_err(human))), + Some(version) => Some(Version::parse(version).map_err(human)?), None => None, }; for ch in name.chars() { @@ -47,9 +47,9 @@ impl PackageIdSpec { pub fn query_str<'a, I>(spec: &str, i: I) -> CargoResult<&'a PackageId> where I: IntoIterator { - let spec = try!(PackageIdSpec::parse(spec).chain_error(|| { + let spec = PackageIdSpec::parse(spec).chain_error(|| { human(format!("invalid package id specification: `{}`", spec)) - })); + })?; spec.query(i) } @@ -68,20 +68,20 @@ impl PackageIdSpec { let frag = url.fragment().map(|s| s.to_owned()); url.set_fragment(None); let (name, version) = { - let mut path = try!(url.path_segments().chain_error(|| { + let mut path = url.path_segments().chain_error(|| { human(format!("pkgid urls must have a path: {}", url)) - })); - let path_name = try!(path.next_back().chain_error(|| { + })?; + let path_name = path.next_back().chain_error(|| { human(format!("pkgid urls must have at least one path \ component: {}", url)) - })); + })?; match frag { Some(fragment) => { let mut parts = fragment.splitn(2, ':'); let name_or_version = parts.next().unwrap(); match parts.next() { Some(part) => { - let version = try!(part.to_semver().map_err(human)); + let version = part.to_semver().map_err(human)?; (name_or_version.to_string(), Some(version)) } None => { @@ -89,8 +89,8 @@ impl PackageIdSpec { .is_alphabetic() { (name_or_version.to_string(), None) } else { - let version = try!(name_or_version.to_semver() - .map_err(human)); + let version = name_or_version.to_semver() + .map_err(human)?; (path_name.to_string(), Some(version)) } } @@ -180,20 +180,20 @@ impl fmt::Display for PackageIdSpec { match self.url { Some(ref url) => { if url.scheme() == "cargo" { - try!(write!(f, "{}{}", url.host().unwrap(), url.path())); + write!(f, "{}{}", url.host().unwrap(), url.path())?; } else { - try!(write!(f, "{}", url)); + write!(f, "{}", url)?; } if url.path_segments().unwrap().next_back().unwrap() != &self.name { printed_name = true; - try!(write!(f, "#{}", self.name)); + write!(f, "#{}", self.name)?; } } - None => { printed_name = true; try!(write!(f, "{}", self.name)) } + None => { printed_name = true; write!(f, "{}", self.name)? 
} } match self.version { Some(ref v) => { - try!(write!(f, "{}{}", if printed_name {":"} else {"#"}, v)); + write!(f, "{}{}", if printed_name {":"} else {"#"}, v)?; } None => {} } diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 5b7c21ac35e..037b87a8b62 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -91,7 +91,7 @@ enum Kind { impl<'cfg> PackageRegistry<'cfg> { pub fn new(config: &'cfg Config) -> CargoResult> { - let source_config = try!(SourceConfigMap::new(config)); + let source_config = SourceConfigMap::new(config)?; Ok(PackageRegistry { sources: SourceMap::new(), source_ids: HashMap::new(), @@ -138,13 +138,13 @@ impl<'cfg> PackageRegistry<'cfg> { } } - try!(self.load(namespace, kind)); + self.load(namespace, kind)?; Ok(()) } pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> { for id in ids.iter() { - try!(self.ensure_loaded(id, Kind::Locked)); + self.ensure_loaded(id, Kind::Locked)?; } Ok(()) } @@ -178,7 +178,7 @@ impl<'cfg> PackageRegistry<'cfg> { fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { (|| { - let source = try!(self.source_config.load(source_id)); + let source = self.source_config.load(source_id)?; if kind == Kind::Override { self.overrides.push(source_id.clone()); @@ -196,7 +196,7 @@ impl<'cfg> PackageRegistry<'cfg> { for s in self.overrides.iter() { let src = self.sources.get_mut(s).unwrap(); let dep = Dependency::new_override(dep.name(), s); - let mut results = try!(src.query(&dep)); + let mut results = src.query(&dep)?; if results.len() > 0 { return Ok(Some(results.remove(0))) } @@ -291,17 +291,17 @@ impl<'cfg> PackageRegistry<'cfg> { override_summary: &Summary, real_summary: &Summary) -> CargoResult<()> { let real = real_summary.package_id(); - let map = try!(self.locked.get(real.source_id()).chain_error(|| { + let map = self.locked.get(real.source_id()).chain_error(|| { human(format!("failed to find lock source of {}", real)) - })); - let list = try!(map.get(real.name()).chain_error(|| { + })?; + let list = map.get(real.name()).chain_error(|| { human(format!("failed to find lock name of {}", real)) - })); - let &(_, ref real_deps) = try!(list.iter().find(|&&(ref id, _)| { + })?; + let &(_, ref real_deps) = list.iter().find(|&&(ref id, _)| { real == id }).chain_error(|| { human(format!("failed to find lock version of {}", real)) - })); + })?; let mut real_deps = real_deps.clone(); let boilerplate = "\ @@ -327,7 +327,7 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies dependencies; the dependency on `{}` was either added or\n\ modified to not match the previously resolved version\n\n\ {}", override_summary.package_id().name(), dep.name(), boilerplate); - try!(self.source_config.config().shell().warn(&msg)); + self.source_config.config().shell().warn(&msg)?; return Ok(()) } @@ -336,7 +336,7 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies path override for crate `{}` has altered the original list of dependencies; the dependency on `{}` was removed\n\n {}", override_summary.package_id().name(), id.name(), boilerplate); - try!(self.source_config.config().shell().warn(&msg)); + self.source_config.config().shell().warn(&msg)?; return Ok(()) } @@ -347,14 +347,14 @@ http://doc.crates.io/specifying-dependencies.html#overriding-dependencies impl<'cfg> Registry for PackageRegistry<'cfg> { fn query(&mut self, dep: &Dependency) -> CargoResult> { // Ensure the requested source_id is loaded - try!(self.ensure_loaded(dep.source_id(), 
Kind::Normal).chain_error(|| { + self.ensure_loaded(dep.source_id(), Kind::Normal).chain_error(|| { human(format!("failed to load source for a dependency \ on `{}`", dep.name())) - })); + })?; - let override_summary = try!(self.query_overrides(&dep)); + let override_summary = self.query_overrides(&dep)?; let real_summaries = match self.sources.get_mut(dep.source_id()) { - Some(src) => Some(try!(src.query(&dep))), + Some(src) => Some(src.query(&dep)?), None => None, }; @@ -363,7 +363,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> { if summaries.len() != 1 { bail!("found an override with a non-locked list"); } - try!(self.warn_bad_override(&candidate, &summaries[0])); + self.warn_bad_override(&candidate, &summaries[0])?; vec![candidate] } (Some(_), None) => bail!("override found but no real ones"), diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 2e67ef6e5c7..47c20b89bea 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -52,7 +52,7 @@ impl EncodableResolve { // We failed to find a local package in the workspace. // It must have been removed and should be ignored. None => continue, - Some(source) => try!(PackageId::new(&pkg.name, &pkg.version, source)) + Some(source) => PackageId::new(&pkg.name, &pkg.version, source)? }; assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none()) @@ -88,7 +88,7 @@ impl EncodableResolve { }; for edge in deps.iter() { - if let Some(to_depend_on) = try!(lookup_id(edge)) { + if let Some(to_depend_on) = lookup_id(edge)? { g.link(id.clone(), to_depend_on); } } @@ -101,7 +101,7 @@ impl EncodableResolve { for &(ref id, ref pkg) in live_pkgs.values() { if let Some(ref replace) = pkg.replace { assert!(pkg.dependencies.is_none()); - if let Some(replace_id) = try!(lookup_id(replace)) { + if let Some(replace_id) = lookup_id(replace)? { replacements.insert(id.clone(), replace_id); } } @@ -132,9 +132,9 @@ impl EncodableResolve { for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { to_remove.push(k.to_string()); let k = &k[prefix.len()..]; - let enc_id: EncodablePackageId = try!(k.parse().chain_error(|| { + let enc_id: EncodablePackageId = k.parse().chain_error(|| { internal("invalid encoding of checksum in lockfile") - })); + })?; let id = match lookup_id(&enc_id) { Ok(Some(id)) => id, _ => continue, @@ -222,9 +222,9 @@ pub struct EncodablePackageId { impl fmt::Display for EncodablePackageId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "{} {}", self.name, self.version)); + write!(f, "{} {}", self.name, self.version)?; if let Some(ref s) = self.source { - try!(write!(f, " ({})", s.to_url())); + write!(f, " ({})", s.to_url())?; } Ok(()) } @@ -235,15 +235,15 @@ impl FromStr for EncodablePackageId { fn from_str(s: &str) -> CargoResult { let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); - let captures = try!(regex.captures(s).ok_or_else(|| { + let captures = regex.captures(s).ok_or_else(|| { internal("invalid serialized PackageId") - })); + })?; let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); let source_id = match captures.at(3) { - Some(s) => Some(try!(SourceId::from_url(s))), + Some(s) => Some(SourceId::from_url(s)?), None => None, }; diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 9cc723d69dc..ad8f9680218 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -98,11 +98,6 @@ pub enum Method<'a> { }, } -// Err(..) 
== standard transient error (e.g. I/O error) -// Ok(Err(..)) == resolve error, but is human readable -// Ok(Ok(..)) == success in resolving -type ResolveResult<'a> = CargoResult>>>; - // Information about the dependencies for a crate, a tuple of: // // (dependency info, candidates, features activated) @@ -226,10 +221,10 @@ unable to verify that `{0}` is the same as when the lockfile was generated impl fmt::Debug for Resolve { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - try!(write!(fmt, "graph: {:?}\n", self.graph)); - try!(write!(fmt, "\nfeatures: {{\n")); + write!(fmt, "graph: {:?}\n", self.graph)?; + write!(fmt, "\nfeatures: {{\n")?; for (pkg, features) in &self.features { - try!(write!(fmt, " {}: {:?}\n", pkg, features)); + write!(fmt, " {}: {:?}\n", pkg, features)?; } write!(fmt, "}}") } @@ -274,7 +269,7 @@ pub fn resolve(summaries: &[(Summary, Method)], replacements: replacements, }; let _p = profile::start(format!("resolving")); - let cx = try!(activate_deps_loop(cx, registry, summaries)); + let cx = activate_deps_loop(cx, registry, summaries)?; let mut resolve = Resolve { graph: cx.resolve_graph, @@ -289,7 +284,7 @@ pub fn resolve(summaries: &[(Summary, Method)], resolve.checksums.insert(summary.package_id().clone(), cksum); } - try!(check_cycles(&resolve, &cx.activations)); + check_cycles(&resolve, &cx.activations)?; trace!("resolved: {:?}", resolve); Ok(resolve) @@ -333,7 +328,7 @@ fn activate(cx: &mut Context, } }; - let deps = try!(cx.build_deps(registry, &candidate, method)); + let deps = cx.build_deps(registry, &candidate, method)?; Ok(Some(DepsFrame { parent: candidate, @@ -450,8 +445,8 @@ fn activate_deps_loop<'a>(mut cx: Context<'a>, debug!("initial activation: {}", summary.package_id()); let summary = Rc::new(summary.clone()); let candidate = Candidate { summary: summary, replace: None }; - remaining_deps.extend(try!(activate(&mut cx, registry, None, candidate, - method))); + remaining_deps.extend(activate(&mut cx, registry, None, candidate, + method)?); } // Main resolution loop, this is the workhorse of the resolution algorithm. @@ -558,8 +553,8 @@ fn activate_deps_loop<'a>(mut cx: Context<'a>, }; trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(), candidate.summary.version()); - remaining_deps.extend(try!(activate(&mut cx, registry, Some(&parent), - candidate, &method))); + remaining_deps.extend(activate(&mut cx, registry, Some(&parent), + candidate, &method)?); } Ok(cx) @@ -709,16 +704,16 @@ fn build_features(s: &Summary, method: &Method) match *method { Method::Everything => { for key in s.features().keys() { - try!(add_feature(s, key, &mut deps, &mut used, &mut visited)); + add_feature(s, key, &mut deps, &mut used, &mut visited)?; } for dep in s.dependencies().iter().filter(|d| d.is_optional()) { - try!(add_feature(s, dep.name(), &mut deps, &mut used, - &mut visited)); + add_feature(s, dep.name(), &mut deps, &mut used, + &mut visited)?; } } Method::Required { features: requested_features, .. } => { for feat in requested_features.iter() { - try!(add_feature(s, feat, &mut deps, &mut used, &mut visited)); + add_feature(s, feat, &mut deps, &mut used, &mut visited)?; } } } @@ -726,8 +721,8 @@ fn build_features(s: &Summary, method: &Method) Method::Everything | Method::Required { uses_default_features: true, .. 
} => { if s.features().get("default").is_some() { - try!(add_feature(s, "default", &mut deps, &mut used, - &mut visited)); + add_feature(s, "default", &mut deps, &mut used, + &mut visited)?; } } Method::Required { uses_default_features: false, .. } => {} @@ -765,7 +760,7 @@ fn build_features(s: &Summary, method: &Method) match s.features().get(feat) { Some(recursive) => { for f in recursive { - try!(add_feature(s, f, deps, used, visited)); + add_feature(s, f, deps, used, visited)?; } } None => { @@ -820,19 +815,19 @@ impl<'a> Context<'a> { // First, figure out our set of dependencies based on the requsted set // of features. This also calculates what features we're going to enable // for our own dependencies. - let deps = try!(self.resolve_features(candidate, method)); + let deps = self.resolve_features(candidate, method)?; // Next, transform all dependencies into a list of possible candidates // which can satisfy that dependency. - let mut deps = try!(deps.into_iter().map(|(dep, features)| { - let mut candidates = try!(self.query(registry, &dep)); + let mut deps = deps.into_iter().map(|(dep, features)| { + let mut candidates = self.query(registry, &dep)?; // When we attempt versions for a package, we'll want to start at // the maximum version and work our way down. candidates.sort_by(|a, b| { b.summary.version().cmp(a.summary.version()) }); Ok((dep, candidates, features)) - }).collect::>>()); + }).collect::>>()?; // Attempt to resolve dependencies with fewer candidates before trying // dependencies with more candidates. This way if the dependency with @@ -852,7 +847,7 @@ impl<'a> Context<'a> { fn query(&self, registry: &mut Registry, dep: &Dependency) -> CargoResult> { - let summaries = try!(registry.query(dep)); + let summaries = registry.query(dep)?; summaries.into_iter().map(Rc::new).map(|summary| { // get around lack of non-lexical lifetimes let summary2 = summary.clone(); @@ -866,13 +861,13 @@ impl<'a> Context<'a> { }; debug!("found an override for {} {}", dep.name(), dep.version_req()); - let mut summaries = try!(registry.query(dep)).into_iter(); - let s = try!(summaries.next().chain_error(|| { + let mut summaries = registry.query(dep)?.into_iter(); + let s = summaries.next().chain_error(|| { human(format!("no matching package for override `{}` found\n\ location searched: {}\n\ version required: {}", spec, dep.source_id(), dep.version_req())) - })); + })?; let summaries = summaries.collect::>(); if summaries.len() > 0 { let bullets = summaries.iter().map(|s| { @@ -928,8 +923,8 @@ impl<'a> Context<'a> { let deps = candidate.dependencies(); let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps); - let (mut feature_deps, used_features) = try!(build_features(candidate, - method)); + let (mut feature_deps, used_features) = build_features(candidate, + method)?; let mut ret = Vec::new(); // Next, sanitize all requested features by whitelisting all the @@ -988,11 +983,11 @@ fn check_cycles(resolve: &Resolve, let mut checked = HashSet::new(); for pkg in all_packages { if !checked.contains(pkg) { - try!(visit(resolve, - pkg, - &summaries, - &mut HashSet::new(), - &mut checked)) + visit(resolve, + pkg, + &summaries, + &mut HashSet::new(), + &mut checked)? 
} } return Ok(()); @@ -1024,7 +1019,7 @@ fn check_cycles(resolve: &Resolve, }); let mut empty = HashSet::new(); let visited = if is_transitive {&mut *visited} else {&mut empty}; - try!(visit(resolve, dep, summaries, visited, checked)); + visit(resolve, dep, summaries, visited, checked)?; } } diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index b7996a36a1e..a2830787467 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -163,7 +163,7 @@ impl Shell { fn get_term(out: Box) -> CargoResult { // Check if the creation of a console will succeed if ::term::WinConsole::new(vec![0u8; 0]).is_ok() { - let t = try!(::term::WinConsole::new(out)); + let t = ::term::WinConsole::new(out)?; if !t.supports_color() { Ok(NoColor(Box::new(t))) } else { @@ -206,11 +206,11 @@ impl Shell { } pub fn say(&mut self, message: T, color: Color) -> CargoResult<()> { - try!(self.reset()); - if color != BLACK { try!(self.fg(color)); } - try!(write!(self, "{}\n", message.to_string())); - try!(self.reset()); - try!(self.flush()); + self.reset()?; + if color != BLACK { self.fg(color)?; } + write!(self, "{}\n", message.to_string())?; + self.reset()?; + self.flush()?; Ok(()) } @@ -222,17 +222,17 @@ impl Shell { -> CargoResult<()> where T: fmt::Display, U: fmt::Display { - try!(self.reset()); - if color != BLACK { try!(self.fg(color)); } - if self.supports_attr(Attr::Bold) { try!(self.attr(Attr::Bold)); } + self.reset()?; + if color != BLACK { self.fg(color)?; } + if self.supports_attr(Attr::Bold) { self.attr(Attr::Bold)?; } if justified { - try!(write!(self, "{:>12}", status.to_string())); + write!(self, "{:>12}", status.to_string())?; } else { - try!(write!(self, "{}", status)); + write!(self, "{}", status)?; } - try!(self.reset()); - try!(write!(self, " {}\n", message)); - try!(self.flush()); + self.reset()?; + write!(self, " {}\n", message)?; + self.flush()?; Ok(()) } @@ -240,7 +240,7 @@ impl Shell { let colored = self.colored(); match self.terminal { - Colored(ref mut c) if colored => try!(c.fg(color)), + Colored(ref mut c) if colored => c.fg(color)?, _ => return Ok(false), } Ok(true) @@ -250,7 +250,7 @@ impl Shell { let colored = self.colored(); match self.terminal { - Colored(ref mut c) if colored => try!(c.attr(attr)), + Colored(ref mut c) if colored => c.attr(attr)?, _ => return Ok(false) } Ok(true) @@ -269,7 +269,7 @@ impl Shell { let colored = self.colored(); match self.terminal { - Colored(ref mut c) if colored => try!(c.reset()), + Colored(ref mut c) if colored => c.reset()?, _ => () } Ok(()) diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index 4a46c19294d..91d2acff789 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -131,11 +131,11 @@ impl SourceId { pub fn from_url(string: &str) -> CargoResult { let mut parts = string.splitn(2, '+'); let kind = parts.next().unwrap(); - let url = try!(parts.next().ok_or(human(format!("invalid source `{}`", string)))); + let url = parts.next().ok_or(human(format!("invalid source `{}`", string)))?; match kind { "git" => { - let mut url = try!(url.to_url()); + let mut url = url.to_url()?; let mut reference = GitReference::Branch("master".to_string()); for (k, v) in url.query_pairs() { match &k[..] 
{ @@ -154,12 +154,12 @@ impl SourceId { Ok(SourceId::for_git(&url, reference).with_precise(precise)) }, "registry" => { - let url = try!(url.to_url()); + let url = url.to_url()?; Ok(SourceId::new(Kind::Registry, url) .with_precise(Some("locked".to_string()))) } "path" => { - let url = try!(url.to_url()); + let url = url.to_url()?; Ok(SourceId::new(Kind::Path, url)) } kind => Err(human(format!("unsupported source protocol: {}", kind))) @@ -198,7 +198,7 @@ impl SourceId { // Pass absolute path pub fn for_path(path: &Path) -> CargoResult { - let url = try!(path.to_url()); + let url = path.to_url()?; Ok(SourceId::new(Kind::Path, url)) } @@ -211,12 +211,12 @@ impl SourceId { } pub fn for_local_registry(path: &Path) -> CargoResult { - let url = try!(path.to_url()); + let url = path.to_url()?; Ok(SourceId::new(Kind::LocalRegistry, url)) } pub fn for_directory(path: &Path) -> CargoResult { - let url = try!(path.to_url()); + let url = path.to_url()?; Ok(SourceId::new(Kind::Directory, url)) } @@ -225,20 +225,20 @@ impl SourceId { /// This is the main cargo registry by default, but it can be overridden in /// a `.cargo/config`. pub fn crates_io(config: &Config) -> CargoResult { - let cfg = try!(ops::registry_configuration(config)); + let cfg = ops::registry_configuration(config)?; let url = if let Some(ref index) = cfg.index { static WARNED: AtomicBool = ATOMIC_BOOL_INIT; if !WARNED.swap(true, SeqCst) { - try!(config.shell().warn("custom registry support via \ + config.shell().warn("custom registry support via \ the `registry.index` configuration is \ being removed, this functionality \ - will not work in the future")); + will not work in the future")?; } &index[..] } else { CRATES_IO }; - let url = try!(url.to_url()); + let url = url.to_url()?; Ok(SourceId::for_registry(&url)) } @@ -348,7 +348,7 @@ impl Encodable for SourceId { impl Decodable for SourceId { fn decode(d: &mut D) -> Result { - let string: String = try!(Decodable::decode(d)); + let string: String = Decodable::decode(d)?; SourceId::from_url(&string).map_err(|e| { d.error(&e.to_string()) }) @@ -363,11 +363,11 @@ impl fmt::Display for SourceId { } SourceIdInner { kind: Kind::Git(ref reference), ref url, ref precise, .. } => { - try!(write!(f, "{}{}", url, reference.url_ref())); + write!(f, "{}{}", url, reference.url_ref())?; if let Some(ref s) = *precise { let len = cmp::min(s.len(), 8); - try!(write!(f, "#{}", &s[..len])); + write!(f, "#{}", &s[..len])?; } Ok(()) } diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index 65f1537647c..f35432f6469 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -82,7 +82,7 @@ impl<'cfg> Workspace<'cfg> { /// before returning it, so `Ok` is only returned for valid workspaces. pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult> { - let target_dir = try!(config.target_dir()); + let target_dir = config.target_dir()?; let mut ws = Workspace { config: config, @@ -95,9 +95,9 @@ impl<'cfg> Workspace<'cfg> { target_dir: target_dir, members: Vec::new(), }; - ws.root_manifest = try!(ws.find_root(manifest_path)); - try!(ws.find_members()); - try!(ws.validate()); + ws.root_manifest = ws.find_root(manifest_path)?; + ws.find_members()?; + ws.validate()?; Ok(ws) } @@ -130,7 +130,7 @@ impl<'cfg> Workspace<'cfg> { ws.target_dir = if let Some(dir) = target_dir { Some(dir) } else { - try!(ws.config.target_dir()) + ws.config.target_dir()? 
}; ws.members.push(ws.current_manifest.clone()); } @@ -221,7 +221,7 @@ impl<'cfg> Workspace<'cfg> { fn find_root(&mut self, manifest_path: &Path) -> CargoResult> { { - let current = try!(self.packages.load(&manifest_path)); + let current = self.packages.load(&manifest_path)?; match *current.workspace_config() { WorkspaceConfig::Root { .. } => { debug!("find_root - is root {}", manifest_path.display()); @@ -274,7 +274,7 @@ impl<'cfg> Workspace<'cfg> { } }; let members = { - let root = try!(self.packages.load(&root_manifest)); + let root = self.packages.load(&root_manifest)?; match *root.workspace_config() { WorkspaceConfig::Root { ref members } => members.clone(), _ => bail!("root of a workspace inferred but wasn't a root: {}", @@ -286,7 +286,7 @@ impl<'cfg> Workspace<'cfg> { let root = root_manifest.parent().unwrap(); for path in list { let manifest_path = root.join(path).join("Cargo.toml"); - try!(self.find_path_deps(&manifest_path)); + self.find_path_deps(&manifest_path)?; } } @@ -302,7 +302,7 @@ impl<'cfg> Workspace<'cfg> { self.members.push(manifest_path.to_path_buf()); let candidates = { - let pkg = match *try!(self.packages.load(manifest_path)) { + let pkg = match *self.packages.load(manifest_path)? { MaybePackage::Package(ref p) => p, MaybePackage::Virtual(_) => return Ok(()), }; @@ -315,7 +315,7 @@ impl<'cfg> Workspace<'cfg> { .collect::>() }; for candidate in candidates { - try!(self.find_path_deps(&candidate)); + self.find_path_deps(&candidate)?; } Ok(()) } @@ -373,7 +373,7 @@ impl<'cfg> Workspace<'cfg> { } for member in self.members.clone() { - let root = try!(self.find_root(&member)); + let root = self.find_root(&member)?; if root == self.root_manifest { continue } @@ -462,7 +462,7 @@ impl<'cfg> Workspace<'cfg> { root_manifest.display()); //TODO: remove `Eq` bound from `Profiles` when the warning is removed. 
- try!(self.config.shell().warn(&message)); + self.config.shell().warn(&message)?; } } } @@ -481,9 +481,9 @@ impl<'cfg> Packages<'cfg> { match self.packages.entry(key.to_path_buf()) { Entry::Occupied(e) => Ok(e.into_mut()), Entry::Vacant(v) => { - let source_id = try!(SourceId::for_path(key)); - let pair = try!(ops::read_manifest(&manifest_path, &source_id, - self.config)); + let source_id = SourceId::for_path(key)?; + let pair = ops::read_manifest(&manifest_path, &source_id, + self.config)?; let (manifest, _nested_paths) = pair; Ok(v.insert(match manifest { EitherManifest::Real(manifest) => { diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index d1a8f908866..33098ee1749 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -67,7 +67,7 @@ pub fn call_main_without_stdin( options_first: bool) -> CliResult> where V: Encodable, T: Decodable { - let flags = try!(flags_from_args::(usage, args, options_first)); + let flags = flags_from_args::(usage, args, options_first)?; exec(flags, config) } @@ -77,7 +77,7 @@ fn process(mut callback: F) { let mut config = None; let result = (|| { - config = Some(try!(Config::default())); + config = Some(Config::default()?); let args: Vec<_> = try!(env::args_os().map(|s| { s.into_string().map_err(|s| { human(format!("invalid unicode in argument: {:?}", s)) diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index 738417a4bf8..99b45f7c20a 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -28,26 +28,26 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> { return rm_rf(&target_dir); } - let mut registry = try!(PackageRegistry::new(opts.config)); - let resolve = try!(ops::resolve_ws(&mut registry, ws)); + let mut registry = PackageRegistry::new(opts.config)?; + let resolve = ops::resolve_ws(&mut registry, ws)?; let packages = ops::get_resolved_packages(&resolve, registry); let profiles = ws.profiles(); - let host_triple = try!(opts.config.rustc()).host.clone(); - let mut cx = try!(Context::new(ws, &resolve, &packages, opts.config, + let host_triple = opts.config.rustc()?.host.clone(); + let mut cx = Context::new(ws, &resolve, &packages, opts.config, BuildConfig { host_triple: host_triple, requested_target: opts.target.map(|s| s.to_owned()), release: opts.release, ..BuildConfig::default() }, - profiles)); + profiles)?; let mut units = Vec::new(); for spec in opts.spec { // Translate the spec to a Package - let pkgid = try!(resolve.query(spec)); - let pkg = try!(packages.get(&pkgid)); + let pkgid = resolve.query(spec)?; + let pkg = packages.get(&pkgid)?; // Generate all relevant `Unit` targets for this package for target in pkg.targets() { @@ -70,17 +70,17 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> { } } - try!(cx.probe_target_info(&units)); + cx.probe_target_info(&units)?; for unit in units.iter() { let layout = cx.layout(unit); - try!(rm_rf(&layout.proxy().fingerprint(&unit.pkg))); - try!(rm_rf(&layout.build(&unit.pkg))); + rm_rf(&layout.proxy().fingerprint(&unit.pkg))?; + rm_rf(&layout.build(&unit.pkg))?; - for (src, link_dst, _) in try!(cx.target_filenames(&unit)) { - try!(rm_rf(&src)); + for (src, link_dst, _) in cx.target_filenames(&unit)? 
{ + rm_rf(&src)?; if let Some(dst) = link_dst { - try!(rm_rf(&dst)); + rm_rf(&dst)?; } } } @@ -91,13 +91,13 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> { fn rm_rf(path: &Path) -> CargoResult<()> { let m = fs::metadata(path); if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { - try!(fs::remove_dir_all(path).chain_error(|| { + fs::remove_dir_all(path).chain_error(|| { human("could not remove build directory") - })); + })?; } else if m.is_ok() { - try!(fs::remove_file(path).chain_error(|| { + fs::remove_file(path).chain_error(|| { human("failed to remove build artifact") - })); + })?; } Ok(()) } diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index bec83524419..74b78769476 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -92,8 +92,8 @@ pub enum CompileFilter<'a> { pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>) -> CargoResult> { - for key in try!(ws.current()).manifest().warnings().iter() { - try!(options.config.shell().warn(key)) + for key in ws.current()?.manifest().warnings().iter() { + options.config.shell().warn(key)? } compile_ws(ws, None, options) } @@ -103,29 +103,29 @@ pub fn resolve_dependencies<'a>(ws: &Workspace<'a>, features: &[String], all_features: bool, no_default_features: bool, - spec: &'a [String]) + specs: &[PackageIdSpec]) -> CargoResult<(PackageSet<'a>, Resolve)> { let features = features.iter().flat_map(|s| { s.split_whitespace() }).map(|s| s.to_string()).collect::>(); - let mut registry = try!(PackageRegistry::new(ws.config())); + let mut registry = PackageRegistry::new(ws.config())?; if let Some(source) = source { - registry.add_preloaded(try!(ws.current()).package_id().source_id(), + registry.add_preloaded(ws.current()?.package_id().source_id(), source); } // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. - let resolve = try!(ops::resolve_ws(&mut registry, ws)); + let resolve = ops::resolve_ws(&mut registry, ws)?; // Second, resolve with precisely what we're doing. Filter out // transitive dependencies if necessary, specify features, handle // overrides, etc. 
let _p = profile::start("resolving w/ overrides..."); - try!(add_overrides(&mut registry, ws)); + add_overrides(&mut registry, ws)?; let method = if all_features { Method::Everything @@ -137,13 +137,10 @@ pub fn resolve_dependencies<'a>(ws: &Workspace<'a>, } }; - let specs = try!(spec.iter().map(|p| PackageIdSpec::parse(p)) - .collect::>>()); - let resolved_with_overrides = - try!(ops::resolve_with_previous(&mut registry, ws, + ops::resolve_with_previous(&mut registry, ws, method, Some(&resolve), None, - &specs)); + specs)?; let packages = ops::get_resolved_packages(&resolved_with_overrides, registry); @@ -155,7 +152,7 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, source: Option>, options: &CompileOptions<'a>) -> CargoResult> { - let root_package = try!(ws.current()); + let root_package = ws.current()?; let CompileOptions { config, jobs, target, spec, features, all_features, no_default_features, release, mode, message_format, @@ -171,24 +168,32 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, let profiles = ws.profiles(); if spec.len() == 0 { - try!(generate_targets(root_package, profiles, mode, filter, release)); + generate_targets(root_package, profiles, mode, filter, release)?; } - let (packages, resolve_with_overrides) = - try!(resolve_dependencies(ws, source, features, all_features, no_default_features, spec)); + let specs = spec.iter().map(|p| PackageIdSpec::parse(p)) + .collect::>>()?; + + let pair = resolve_dependencies(ws, + source, + features, + all_features, + no_default_features, + &specs)?; + let (packages, resolve_with_overrides) = pair; let mut pkgids = Vec::new(); if spec.len() > 0 { for p in spec { - pkgids.push(try!(resolve_with_overrides.query(&p))); + pkgids.push(resolve_with_overrides.query(&p)?); } } else { pkgids.push(root_package.package_id()); }; - let to_builds = try!(pkgids.iter().map(|id| { + let to_builds = pkgids.iter().map(|id| { packages.get(id) - }).collect::>>()); + }).collect::>>()?; let mut general_targets = Vec::new(); let mut package_targets = Vec::new(); @@ -199,8 +204,8 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, panic!("`rustc` and `rustdoc` should not accept multiple `-p` flags") } (Some(args), _) => { - let targets = try!(generate_targets(to_builds[0], profiles, - mode, filter, release)); + let targets = generate_targets(to_builds[0], profiles, + mode, filter, release)?; if targets.len() == 1 { let (target, profile) = targets[0]; let mut profile = profile.clone(); @@ -213,8 +218,8 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, } } (None, Some(args)) => { - let targets = try!(generate_targets(to_builds[0], profiles, - mode, filter, release)); + let targets = generate_targets(to_builds[0], profiles, + mode, filter, release)?; if targets.len() == 1 { let (target, profile) = targets[0]; let mut profile = profile.clone(); @@ -228,8 +233,8 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, } (None, None) => { for &to_build in to_builds.iter() { - let targets = try!(generate_targets(to_build, profiles, mode, - filter, release)); + let targets = generate_targets(to_build, profiles, mode, + filter, release)?; package_targets.push((to_build, targets)); } } @@ -243,7 +248,7 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, let mut ret = { let _p = profile::start("compiling"); - let mut build_config = try!(scrape_build_config(config, jobs, target)); + let mut build_config = scrape_build_config(config, jobs, target)?; build_config.release = release; build_config.test = mode == CompileMode::Test || mode == CompileMode::Bench; build_config.json_errors = message_format == 
MessageFormat::Json; @@ -251,13 +256,13 @@ pub fn compile_ws<'a>(ws: &Workspace<'a>, build_config.doc_all = deps; } - try!(ops::compile_targets(ws, - &package_targets, - &packages, - &resolve_with_overrides, - config, - build_config, - profiles)) + ops::compile_targets(ws, + &package_targets, + &packages, + &resolve_with_overrides, + config, + build_config, + profiles)? }; ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect(); @@ -392,10 +397,10 @@ fn generate_targets<'a>(pkg: &'a Package, } Ok(()) }; - try!(find(bins, "bin", TargetKind::Bin, profile)); - try!(find(examples, "example", TargetKind::Example, build)); - try!(find(tests, "test", TargetKind::Test, test)); - try!(find(benches, "bench", TargetKind::Bench, &profiles.bench)); + find(bins, "bin", TargetKind::Bin, profile)?; + find(examples, "example", TargetKind::Example, build)?; + find(tests, "test", TargetKind::Test, test)?; + find(benches, "bench", TargetKind::Bench, &profiles.bench)?; } Ok(targets) } @@ -406,7 +411,7 @@ fn generate_targets<'a>(pkg: &'a Package, /// have been configured. fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, ws: &Workspace<'a>) -> CargoResult<()> { - let paths = match try!(ws.config().get_list("paths")) { + let paths = match ws.config().get_list("paths")? { Some(list) => list, None => return Ok(()) }; @@ -419,13 +424,13 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, }); for (path, definition) in paths { - let id = try!(SourceId::for_path(&path)); + let id = SourceId::for_path(&path)?; let mut source = PathSource::new_recursive(&path, &id, ws.config()); - try!(source.update().chain_error(|| { + source.update().chain_error(|| { human(format!("failed to update path override `{}` \ (defined in `{}`)", path.display(), definition.display())) - })); + })?; registry.add_override(&id, Box::new(source)); } Ok(()) @@ -443,7 +448,7 @@ fn scrape_build_config(config: &Config, jobs: Option, target: Option) -> CargoResult { - let cfg_jobs = match try!(config.get_i64("build.jobs")) { + let cfg_jobs = match config.get_i64("build.jobs")? 
{ Some(v) => { if v.val <= 0 { bail!("build.jobs must be positive, but found {} in {}", @@ -458,17 +463,17 @@ fn scrape_build_config(config: &Config, None => None, }; let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32); - let cfg_target = try!(config.get_string("build.target")).map(|s| s.val); + let cfg_target = config.get_string("build.target")?.map(|s| s.val); let target = target.or(cfg_target); let mut base = ops::BuildConfig { - host_triple: try!(config.rustc()).host.clone(), + host_triple: config.rustc()?.host.clone(), requested_target: target.clone(), jobs: jobs, ..Default::default() }; - base.host = try!(scrape_target_config(config, &base.host_triple)); + base.host = scrape_target_config(config, &base.host_triple)?; base.target = match target.as_ref() { - Some(triple) => try!(scrape_target_config(config, &triple)), + Some(triple) => scrape_target_config(config, &triple)?, None => base.host.clone(), }; Ok(base) @@ -479,11 +484,11 @@ fn scrape_target_config(config: &Config, triple: &str) let key = format!("target.{}", triple); let mut ret = ops::TargetConfig { - ar: try!(config.get_path(&format!("{}.ar", key))).map(|v| v.val), - linker: try!(config.get_path(&format!("{}.linker", key))).map(|v| v.val), + ar: config.get_path(&format!("{}.ar", key))?.map(|v| v.val), + linker: config.get_path(&format!("{}.linker", key))?.map(|v| v.val), overrides: HashMap::new(), }; - let table = match try!(config.get_table(&key)) { + let table = match config.get_table(&key)? { Some(table) => table.val, None => return Ok(ret), }; @@ -500,36 +505,36 @@ fn scrape_target_config(config: &Config, triple: &str) rerun_if_changed: Vec::new(), warnings: Vec::new(), }; - for (k, value) in try!(value.table(&lib_name)).0 { + for (k, value) in value.table(&lib_name)?.0 { let key = format!("{}.{}", key, k); match &k[..] 
{ "rustc-flags" => { - let (flags, definition) = try!(value.string(&k)); + let (flags, definition) = value.string(&k)?; let whence = format!("in `{}` (in {})", key, definition.display()); - let (paths, links) = try!( + let (paths, links) = BuildOutput::parse_rustc_flags(&flags, &whence) - ); + ?; output.library_paths.extend(paths); output.library_links.extend(links); } "rustc-link-lib" => { - let list = try!(value.list(&k)); + let list = value.list(&k)?; output.library_links.extend(list.iter() .map(|v| v.0.clone())); } "rustc-link-search" => { - let list = try!(value.list(&k)); + let list = value.list(&k)?; output.library_paths.extend(list.iter().map(|v| { PathBuf::from(&v.0) })); } "rustc-cfg" => { - let list = try!(value.list(&k)); + let list = value.list(&k)?; output.cfgs.extend(list.iter().map(|v| v.0.clone())); } _ => { - let val = try!(value.string(&k)).0; + let val = value.string(&k)?.0; output.metadata.push((k.clone(), val.to_string())); } } diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index d9afaed03a9..8c27335745e 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -13,7 +13,7 @@ pub struct DocOptions<'a> { } pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> { - let package = try!(ws.current()); + let package = ws.current()?; let mut lib_names = HashSet::new(); let mut bin_names = HashSet::new(); @@ -34,13 +34,13 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> { } } - try!(ops::compile(ws, &options.compile_opts)); + ops::compile(ws, &options.compile_opts)?; if options.open_result { let name = if options.compile_opts.spec.len() > 1 { bail!("Passing multiple packages and `open` is not supported") } else if options.compile_opts.spec.len() == 1 { - try!(PackageIdSpec::parse(&options.compile_opts.spec[0])) + PackageIdSpec::parse(&options.compile_opts.spec[0])? .name() .replace("-", "_") } else { @@ -62,12 +62,12 @@ pub fn doc(ws: &Workspace, options: &DocOptions) -> CargoResult<()> { if fs::metadata(&path).is_ok() { let mut shell = options.compile_opts.config.shell(); match open_docs(&path) { - Ok(m) => try!(shell.status("Launching", m)), + Ok(m) => shell.status("Launching", m)?, Err(e) => { - try!(shell.warn( - "warning: could not determine a browser to open docs with, tried:")); + shell.warn( + "warning: could not determine a browser to open docs with, tried:")?; for method in e { - try!(shell.warn(format!("\t{}", method))); + shell.warn(format!("\t{}", method))?; } } } diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index a0144c8f472..1c30b0b7e8b 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -5,11 +5,11 @@ use util::CargoResult; /// Executes `cargo fetch`. 
pub fn fetch<'a>(ws: &Workspace<'a>) -> CargoResult<(Resolve, PackageSet<'a>)> { - let mut registry = try!(PackageRegistry::new(ws.config())); - let resolve = try!(ops::resolve_ws(&mut registry, ws)); + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = ops::resolve_ws(&mut registry, ws)?; let packages = get_resolved_packages(&resolve, registry); for id in resolve.iter() { - try!(packages.get(id)); + packages.get(id)?; } Ok((resolve, packages)) } diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 25e2204ba9e..989534cea79 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -16,11 +16,11 @@ pub struct UpdateOptions<'a> { } pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> { - let mut registry = try!(PackageRegistry::new(ws.config())); - let resolve = try!(ops::resolve_with_previous(&mut registry, ws, - Method::Everything, - None, None, &[])); - try!(ops::write_pkg_lockfile(ws, &resolve)); + let mut registry = PackageRegistry::new(ws.config())?; + let resolve = ops::resolve_with_previous(&mut registry, ws, + Method::Everything, + None, None, &[])?; + ops::write_pkg_lockfile(ws, &resolve)?; Ok(()) } @@ -35,11 +35,11 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) bail!("you can't generate a lockfile for an empty workspace.") } - let previous_resolve = match try!(ops::load_pkg_lockfile(ws)) { + let previous_resolve = match ops::load_pkg_lockfile(ws)? { Some(resolve) => resolve, None => return generate_lockfile(ws), }; - let mut registry = try!(PackageRegistry::new(opts.config)); + let mut registry = PackageRegistry::new(opts.config)?; let mut to_avoid = HashSet::new(); if opts.to_update.is_empty() { @@ -47,7 +47,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) } else { let mut sources = Vec::new(); for name in opts.to_update { - let dep = try!(previous_resolve.query(name)); + let dep = previous_resolve.query(name)?; if opts.aggressive { fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new()); @@ -71,15 +71,15 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) }); } } - try!(registry.add_sources(&sources)); + registry.add_sources(&sources)?; } - let resolve = try!(ops::resolve_with_previous(&mut registry, + let resolve = ops::resolve_with_previous(&mut registry, ws, Method::Everything, Some(&previous_resolve), Some(&to_avoid), - &[])); + &[])?; // Summarize what is changing for the user. 
let print_change = |status: &str, msg: String| { @@ -93,18 +93,18 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) } else { format!("{} -> v{}", removed[0], added[0].version()) }; - try!(print_change("Updating", msg)); + print_change("Updating", msg)?; } else { for package in removed.iter() { - try!(print_change("Removing", format!("{}", package))); + print_change("Removing", format!("{}", package))?; } for package in added.iter() { - try!(print_change("Adding", format!("{}", package))); + print_change("Adding", format!("{}", package))?; } } } - try!(ops::write_pkg_lockfile(&ws, &resolve)); + ops::write_pkg_lockfile(&ws, &resolve)?; return Ok(()); fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId, diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index 93ceb40409a..c2a348e2e99 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -53,29 +53,29 @@ pub fn install(root: Option<&str>, opts: &ops::CompileOptions, force: bool) -> CargoResult<()> { let config = opts.config; - let root = try!(resolve_root(root, config)); - let map = try!(SourceConfigMap::new(config)); + let root = resolve_root(root, config)?; + let map = SourceConfigMap::new(config)?; let (pkg, source) = if source_id.is_git() { - try!(select_pkg(GitSource::new(source_id, config), source_id, - krate, vers, &mut |git| git.read_packages())) + select_pkg(GitSource::new(source_id, config), source_id, + krate, vers, &mut |git| git.read_packages())? } else if source_id.is_path() { let path = source_id.url().to_file_path().ok() .expect("path sources must have a valid path"); let mut src = PathSource::new(&path, source_id, config); - try!(src.update().chain_error(|| { + src.update().chain_error(|| { human(format!("`{}` is not a crate root; specify a crate to \ install from crates.io, or use --path or --git to \ specify an alternate source", path.display())) - })); - try!(select_pkg(PathSource::new(&path, source_id, config), - source_id, krate, vers, - &mut |path| path.read_packages())) + })?; + select_pkg(PathSource::new(&path, source_id, config), + source_id, krate, vers, + &mut |path| path.read_packages())? } else { - try!(select_pkg(try!(map.load(source_id)), - source_id, krate, vers, - &mut |_| Err(human("must specify a crate to install from \ - crates.io, or use --path or --git to \ - specify alternate source")))) + select_pkg(map.load(source_id)?, + source_id, krate, vers, + &mut |_| Err(human("must specify a crate to install from \ + crates.io, or use --path or --git to \ + specify alternate source")))? }; @@ -91,22 +91,22 @@ pub fn install(root: Option<&str>, }; let ws = match overidden_target_dir { - Some(dir) => try!(Workspace::one(pkg, config, Some(dir))), - None => try!(Workspace::new(pkg.manifest_path(), config)), + Some(dir) => Workspace::one(pkg, config, Some(dir))?, + None => Workspace::new(pkg.manifest_path(), config)?, }; - let pkg = try!(ws.current()); + let pkg = ws.current()?; // Preflight checks to check up front whether we'll overwrite something. // We have to check this again afterwards, but may as well avoid building // anything if we're gonna throw it away anyway. 
{ - let metadata = try!(metadata(config, &root)); - let list = try!(read_crate_list(metadata.file())); + let metadata = metadata(config, &root)?; + let list = read_crate_list(metadata.file())?; let dst = metadata.parent().join("bin"); - try!(check_overwrites(&dst, pkg, &opts.filter, &list, force)); + check_overwrites(&dst, pkg, &opts.filter, &list, force)?; } - let compile = try!(ops::compile_ws(&ws, Some(source), opts).chain_error(|| { + let compile = ops::compile_ws(&ws, Some(source), opts).chain_error(|| { if let Some(td) = td_opt.take() { // preserve the temporary directory, so the user can inspect it td.into_path(); @@ -114,28 +114,28 @@ pub fn install(root: Option<&str>, human(format!("failed to compile `{}`, intermediate artifacts can be \ found at `{}`", pkg, ws.target_dir().display())) - })); - let binaries: Vec<(&str, &Path)> = try!(compile.binaries.iter().map(|bin| { + })?; + let binaries: Vec<(&str, &Path)> = compile.binaries.iter().map(|bin| { let name = bin.file_name().unwrap(); if let Some(s) = name.to_str() { Ok((s, bin.as_ref())) } else { bail!("Binary `{:?}` name can't be serialized into string", name) } - }).collect::>()); + }).collect::>()?; - let metadata = try!(metadata(config, &root)); - let mut list = try!(read_crate_list(metadata.file())); + let metadata = metadata(config, &root)?; + let mut list = read_crate_list(metadata.file())?; let dst = metadata.parent().join("bin"); - let duplicates = try!(check_overwrites(&dst, pkg, &opts.filter, - &list, force)); + let duplicates = check_overwrites(&dst, pkg, &opts.filter, + &list, force)?; - try!(fs::create_dir_all(&dst)); + fs::create_dir_all(&dst)?; // Copy all binaries to a temporary directory under `dst` first, catching // some failure modes (e.g. out of space) before touching the existing // binaries. This directory will get cleaned up via RAII. - let staging_dir = try!(TempDir::new_in(&dst, "cargo-install")); + let staging_dir = TempDir::new_in(&dst, "cargo-install")?; for &(bin, src) in binaries.iter() { let dst = staging_dir.path().join(bin); // Try to move if `target_dir` is transient. @@ -144,10 +144,10 @@ pub fn install(root: Option<&str>, continue } } - try!(fs::copy(src, &dst).chain_error(|| { + fs::copy(src, &dst).chain_error(|| { human(format!("failed to copy `{}` to `{}`", src.display(), dst.display())) - })); + })?; } let (to_replace, to_install): (Vec<&str>, Vec<&str>) = @@ -160,11 +160,11 @@ pub fn install(root: Option<&str>, for bin in to_install.iter() { let src = staging_dir.path().join(bin); let dst = dst.join(bin); - try!(config.shell().status("Installing", dst.display())); - try!(fs::rename(&src, &dst).chain_error(|| { + config.shell().status("Installing", dst.display())?; + fs::rename(&src, &dst).chain_error(|| { human(format!("failed to move `{}` to `{}`", src.display(), dst.display())) - })); + })?; installed.bins.push(dst); } @@ -176,11 +176,11 @@ pub fn install(root: Option<&str>, for &bin in to_replace.iter() { let src = staging_dir.path().join(bin); let dst = dst.join(bin); - try!(config.shell().status("Replacing", dst.display())); - try!(fs::rename(&src, &dst).chain_error(|| { + config.shell().status("Replacing", dst.display())?; + fs::rename(&src, &dst).chain_error(|| { human(format!("failed to move `{}` to `{}`", src.display(), dst.display())) - })); + })?; replaced_names.push(bin); } Ok(()) @@ -219,8 +219,8 @@ pub fn install(root: Option<&str>, match write_result { // Replacement error (if any) isn't actually caused by write error // but this seems to be the only way to show both. 
- Err(err) => try!(result.chain_error(|| err)), - Ok(_) => try!(result), + Err(err) => result.chain_error(|| err)?, + Ok(_) => result?, } // Reaching here means all actions have succeeded. Clean up. @@ -229,7 +229,7 @@ pub fn install(root: Option<&str>, // Don't bother grabbing a lock as we're going to blow it all away // anyway. let target_dir = ws.target_dir().into_path_unlocked(); - try!(fs::remove_dir_all(&target_dir)); + fs::remove_dir_all(&target_dir)?; } // Print a warning that if this directory isn't in PATH that they won't be @@ -241,9 +241,9 @@ pub fn install(root: Option<&str>, } } - try!(config.shell().warn(&format!("be sure to add `{}` to your PATH to be \ + config.shell().warn(&format!("be sure to add `{}` to your PATH to be \ able to run the installed binaries", - dst.display()))); + dst.display()))?; Ok(()) } @@ -255,14 +255,14 @@ fn select_pkg<'a, T>(mut source: T, -> CargoResult<(Package, Box)> where T: Source + 'a { - try!(source.update()); + source.update()?; match name { Some(name) => { - let dep = try!(Dependency::parse_no_deprecated(name, vers, source_id)); - let deps = try!(source.query(&dep)); + let dep = Dependency::parse_no_deprecated(name, vers, source_id)?; + let deps = source.query(&dep)?; match deps.iter().map(|p| p.package_id()).max() { Some(pkgid) => { - let pkg = try!(source.download(pkgid)); + let pkg = source.download(pkgid)?; Ok((pkg, Box::new(source))) } None => { @@ -274,17 +274,17 @@ fn select_pkg<'a, T>(mut source: T, } } None => { - let candidates = try!(list_all(&mut source)); + let candidates = list_all(&mut source)?; let binaries = candidates.iter().filter(|cand| { cand.targets().iter().filter(|t| t.is_bin()).count() > 0 }); let examples = candidates.iter().filter(|cand| { cand.targets().iter().filter(|t| t.is_example()).count() > 0 }); - let pkg = match try!(one(binaries, |v| multi_err("binaries", v))) { + let pkg = match one(binaries, |v| multi_err("binaries", v))? { Some(p) => p, None => { - match try!(one(examples, |v| multi_err("examples", v))) { + match one(examples, |v| multi_err("examples", v))? 
{ Some(p) => p, None => bail!("no packages found with binaries or \ examples"), @@ -381,10 +381,10 @@ fn find_duplicates(dst: &Path, fn read_crate_list(mut file: &File) -> CargoResult { (|| -> CargoResult<_> { let mut contents = String::new(); - try!(file.read_to_string(&mut contents)); - let listing = try!(toml::decode_str(&contents).chain_error(|| { + file.read_to_string(&mut contents)?; + let listing = toml::decode_str(&contents).chain_error(|| { internal("invalid TOML found for metadata") - })); + })?; match listing { CrateListing::V1(v1) => Ok(v1), CrateListing::Empty => { @@ -398,10 +398,10 @@ fn read_crate_list(mut file: &File) -> CargoResult { fn write_crate_list(mut file: &File, listing: CrateListingV1) -> CargoResult<()> { (|| -> CargoResult<_> { - try!(file.seek(SeekFrom::Start(0))); - try!(file.set_len(0)); + file.seek(SeekFrom::Start(0))?; + file.set_len(0)?; let data = toml::encode_str::(&CrateListing::V1(listing)); - try!(file.write_all(data.as_bytes())); + file.write_all(data.as_bytes())?; Ok(()) }).chain_error(|| { human("failed to write crate metadata") @@ -409,15 +409,15 @@ fn write_crate_list(mut file: &File, listing: CrateListingV1) -> CargoResult<()> } pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { - let dst = try!(resolve_root(dst, config)); - let dst = try!(metadata(config, &dst)); - let list = try!(read_crate_list(dst.file())); + let dst = resolve_root(dst, config)?; + let dst = metadata(config, &dst)?; + let list = read_crate_list(dst.file())?; let mut shell = config.shell(); let out = shell.out(); for (k, v) in list.v1.iter() { - try!(writeln!(out, "{}:", k)); + writeln!(out, "{}:", k)?; for bin in v { - try!(writeln!(out, " {}", bin)); + writeln!(out, " {}", bin)?; } } Ok(()) @@ -427,12 +427,12 @@ pub fn uninstall(root: Option<&str>, spec: &str, bins: &[String], config: &Config) -> CargoResult<()> { - let root = try!(resolve_root(root, config)); - let crate_metadata = try!(metadata(config, &root)); - let mut metadata = try!(read_crate_list(crate_metadata.file())); + let root = resolve_root(root, config)?; + let crate_metadata = metadata(config, &root)?; + let mut metadata = read_crate_list(crate_metadata.file())?; let mut to_remove = Vec::new(); { - let result = try!(PackageIdSpec::query_str(spec, metadata.v1.keys())) + let result = PackageIdSpec::query_str(spec, metadata.v1.keys())? 
.clone(); let mut installed = match metadata.v1.entry(result.clone()) { Entry::Occupied(e) => e, @@ -474,10 +474,10 @@ pub fn uninstall(root: Option<&str>, installed.remove(); } } - try!(write_crate_list(crate_metadata.file(), metadata)); + write_crate_list(crate_metadata.file(), metadata)?; for bin in to_remove { - try!(config.shell().status("Removing", bin.display())); - try!(fs::remove_file(bin)); + config.shell().status("Removing", bin.display())?; + fs::remove_file(bin)?; } Ok(()) @@ -489,7 +489,7 @@ fn metadata(config: &Config, root: &Filesystem) -> CargoResult { fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult { - let config_root = try!(config.get_path("install.root")); + let config_root = config.get_path("install.root")?; Ok(flag.map(PathBuf::from).or_else(|| { env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from) }).or_else(move || { diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index fe00a74d989..6b41f3ed5d3 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -42,7 +42,7 @@ struct MkOptions<'a> { impl Decodable for VersionControl { fn decode(d: &mut D) -> Result { - Ok(match &try!(d.read_str())[..] { + Ok(match &d.read_str()?[..] { "git" => VersionControl::Git, "hg" => VersionControl::Hg, "none" => VersionControl::NoVcs, @@ -95,10 +95,10 @@ fn get_name<'a>(path: &'a Path, opts: &'a NewOptions, config: &Config) -> CargoR path.as_os_str()); } - let dir_name = try!(path.file_name().and_then(|s| s.to_str()).chain_error(|| { + let dir_name = path.file_name().and_then(|s| s.to_str()).chain_error(|| { human(&format!("cannot create a project with a non-unicode name: {:?}", path.file_name().unwrap())) - })); + })?; if opts.bin { Ok(dir_name) @@ -108,7 +108,7 @@ fn get_name<'a>(path: &'a Path, opts: &'a NewOptions, config: &Config) -> CargoR let message = format!( "note: package will be named `{}`; use --name to override", new_name); - try!(config.shell().say(&message, BLACK)); + config.shell().say(&message, BLACK)?; } Ok(new_name) } @@ -196,7 +196,7 @@ fn detect_source_paths_and_types(project_path : &Path, } } H::Detect => { - let content = try!(paths::read(&path.join(pp.clone()))); + let content = paths::read(&path.join(pp.clone()))?; let isbin = content.contains("fn main"); SourceFileInformation { relative_path: pp, @@ -265,8 +265,8 @@ pub fn new(opts: NewOptions, config: &Config) -> CargoResult<()> { bail!("can't specify both lib and binary outputs"); } - let name = try!(get_name(&path, &opts, config)); - try!(check_name(name)); + let name = get_name(&path, &opts, config)?; + check_name(name)?; let mkopts = MkOptions { version_control: opts.version_control, @@ -294,12 +294,12 @@ pub fn init(opts: NewOptions, config: &Config) -> CargoResult<()> { bail!("can't specify both lib and binary outputs"); } - let name = try!(get_name(&path, &opts, config)); - try!(check_name(name)); + let name = get_name(&path, &opts, config)?; + check_name(name)?; let mut src_paths_types = vec![]; - try!(detect_source_paths_and_types(&path, name, &mut src_paths_types)); + detect_source_paths_and_types(&path, name, &mut src_paths_types)?; if src_paths_types.len() == 0 { src_paths_types.push(plan_new_source_file(opts.bin, name.to_string())); @@ -369,7 +369,7 @@ fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool { fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> { let path = opts.path; let name = opts.name; - let cfg = try!(global_config(config)); + let cfg = global_config(config)?; let mut ignore = "target\n".to_string(); let 
in_existing_vcs_repo = existing_vcs_repo(path.parent().unwrap(), config.cwd()); if !opts.bin { @@ -386,22 +386,22 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> { match vcs { VersionControl::Git => { if !fs::metadata(&path.join(".git")).is_ok() { - try!(GitRepo::init(path, config.cwd())); + GitRepo::init(path, config.cwd())?; } - try!(paths::append(&path.join(".gitignore"), ignore.as_bytes())); + paths::append(&path.join(".gitignore"), ignore.as_bytes())?; }, VersionControl::Hg => { if !fs::metadata(&path.join(".hg")).is_ok() { - try!(HgRepo::init(path, config.cwd())); + HgRepo::init(path, config.cwd())?; } - try!(paths::append(&path.join(".hgignore"), ignore.as_bytes())); + paths::append(&path.join(".hgignore"), ignore.as_bytes())?; }, VersionControl::NoVcs => { - try!(fs::create_dir_all(path)); + fs::create_dir_all(path)?; }, }; - let (author_name, email) = try!(discover_author()); + let (author_name, email) = discover_author()?; // Hoo boy, sure glad we've got exhaustivenes checking behind us. let author = match (cfg.name, cfg.email, author_name, email) { (Some(name), Some(email), _, _) | @@ -438,14 +438,14 @@ path = {} // Create Cargo.toml file with necessary [lib] and [[bin]] sections, if needed - try!(paths::write(&path.join("Cargo.toml"), format!( + paths::write(&path.join("Cargo.toml"), format!( r#"[package] name = "{}" version = "0.1.0" authors = [{}] [dependencies] -{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes())); +{}"#, name, toml::Value::String(author), cargotoml_path_specifier).as_bytes())?; // Create all specified source files @@ -456,7 +456,7 @@ authors = [{}] let path_of_source_file = path.join(i.relative_path.clone()); if let Some(src_dir) = path_of_source_file.parent() { - try!(fs::create_dir_all(src_dir)); + fs::create_dir_all(src_dir)?; } let default_file_content : &[u8] = if i.bin { @@ -477,14 +477,14 @@ mod tests { }; if !fs::metadata(&path_of_source_file).map(|x| x.is_file()).unwrap_or(false) { - try!(paths::write(&path_of_source_file, default_file_content)); + paths::write(&path_of_source_file, default_file_content)?; } } if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) { let msg = format!("compiling this new crate may not work due to invalid \ workspace configuration\n\n{}", e); - try!(config.shell().warn(msg)); + config.shell().warn(msg)?; } Ok(()) @@ -526,9 +526,9 @@ fn discover_author() -> CargoResult<(String, Option)> { } fn global_config(config: &Config) -> CargoResult { - let name = try!(config.get_string("cargo-new.name")).map(|s| s.val); - let email = try!(config.get_string("cargo-new.email")).map(|s| s.val); - let vcs = try!(config.get_string("cargo-new.vcs")); + let name = config.get_string("cargo-new.name")?.map(|s| s.val); + let email = config.get_string("cargo-new.email")?.map(|s| s.val); + let vcs = config.get_string("cargo-new.vcs")?; let vcs = match vcs.as_ref().map(|p| (&p.val[..], &p.definition)) { Some(("git", _)) => Some(VersionControl::Git), diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index cda9b53e67f..bc1a04f0cfd 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -1,7 +1,7 @@ use rustc_serialize::{Encodable, Encoder}; use core::resolver::Resolve; -use core::{Package, PackageId, Workspace}; +use core::{Package, PackageId, PackageIdSpec, Workspace}; use ops; use util::CargoResult; @@ -43,12 +43,15 @@ fn metadata_no_deps(ws: &Workspace, fn metadata_full(ws: &Workspace, opt: 
&OutputMetadataOptions) -> CargoResult { - let deps = try!(ops::resolve_dependencies(ws, - None, - &opt.features, - opt.all_features, - opt.no_default_features, - &[])); + let specs = ws.members().map(|pkg| { + PackageIdSpec::from_package_id(pkg.package_id()) + }).collect::>(); + let deps = ops::resolve_dependencies(ws, + None, + &opt.features, + opt.all_features, + opt.no_default_features, + &specs)?; let (packages, resolve) = deps; let packages = try!(packages.package_ids() diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 55f8dd64fc9..26841796ec8 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -24,22 +24,22 @@ pub struct PackageOpts<'cfg> { pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult> { - let pkg = try!(ws.current()); + let pkg = ws.current()?; let config = ws.config(); let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config); - try!(src.update()); + src.update()?; if opts.check_metadata { - try!(check_metadata(pkg, config)); + check_metadata(pkg, config)?; } - try!(verify_dependencies(&pkg)); + verify_dependencies(&pkg)?; if opts.list { let root = pkg.root(); - let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| { + let mut list: Vec<_> = src.list_files(&pkg)?.iter().map(|file| { util::without_prefix(&file, &root).unwrap().to_path_buf() }).collect(); list.sort(); @@ -50,38 +50,38 @@ pub fn package(ws: &Workspace, } if !opts.allow_dirty { - try!(check_not_dirty(&pkg, &src)); + check_not_dirty(&pkg, &src)?; } let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let dir = ws.target_dir().join("package"); let mut dst = { let tmp = format!(".{}", filename); - try!(dir.open_rw(&tmp, config, "package scratch space")) + dir.open_rw(&tmp, config, "package scratch space")? }; // Package up and test a temporary tarball and only move it to the final // location if it actually passes all our tests. Any previously existing // tarball can be assumed as corrupt or invalid, so we just blow it away if // it exists. - try!(config.shell().status("Packaging", pkg.package_id().to_string())); - try!(dst.file().set_len(0)); - try!(tar(ws, &src, dst.file(), &filename).chain_error(|| { + config.shell().status("Packaging", pkg.package_id().to_string())?; + dst.file().set_len(0)?; + tar(ws, &src, dst.file(), &filename).chain_error(|| { human("failed to prepare local package for uploading") - })); + })?; if opts.verify { - try!(dst.seek(SeekFrom::Start(0))); - try!(run_verify(ws, dst.file(), opts).chain_error(|| { + dst.seek(SeekFrom::Start(0))?; + run_verify(ws, dst.file(), opts).chain_error(|| { human("failed to verify package tarball") - })) + })? } - try!(dst.seek(SeekFrom::Start(0))); + dst.seek(SeekFrom::Start(0))?; { let src_path = dst.path(); let dst_path = dst.parent().join(&filename); - try!(fs::rename(&src_path, &dst_path).chain_error(|| { + fs::rename(&src_path, &dst_path).chain_error(|| { human("failed to move temporary tarball into final location") - })); + })?; } Ok(Some(dst)) } @@ -113,10 +113,10 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> { } things.push_str(&missing.last().unwrap()); - try!(config.shell().warn( + config.shell().warn( &format!("manifest has no {things}.\n\ See http://doc.crates.io/manifest.html#package-metadata for more info.", - things = things))) + things = things))? 
} Ok(()) } @@ -159,7 +159,7 @@ fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> { src: &PathSource, repo: &git2::Repository) -> CargoResult<()> { let workdir = repo.workdir().unwrap(); - let dirty = try!(src.list_files(p)).iter().filter(|file| { + let dirty = src.list_files(p)?.iter().filter(|file| { let relative = file.strip_prefix(workdir).unwrap(); if let Ok(status) = repo.status_file(relative) { status != git2::STATUS_CURRENT @@ -185,27 +185,27 @@ fn tar(ws: &Workspace, filename: &str) -> CargoResult<()> { // Prepare the encoder and its header let filename = Path::new(filename); - let encoder = GzBuilder::new().filename(try!(util::path2bytes(filename))) + let encoder = GzBuilder::new().filename(util::path2bytes(filename)?) .write(dst, Compression::Best); // Put all package files into a compressed archive let mut ar = Builder::new(encoder); - let pkg = try!(ws.current()); + let pkg = ws.current()?; let config = ws.config(); let root = pkg.root(); - for file in try!(src.list_files(pkg)).iter() { + for file in src.list_files(pkg)?.iter() { let relative = util::without_prefix(&file, &root).unwrap(); - try!(check_filename(relative)); - let relative = try!(relative.to_str().chain_error(|| { + check_filename(relative)?; + let relative = relative.to_str().chain_error(|| { human(format!("non-utf8 path in source directory: {}", relative.display())) - })); - let mut file = try!(File::open(file).chain_error(|| { + })?; + let mut file = File::open(file).chain_error(|| { human(format!("failed to open for archiving: `{}`", file.display())) - })); - try!(config.shell().verbose(|shell| { + })?; + config.shell().verbose(|shell| { shell.status("Archiving", &relative) - })); + })?; let path = format!("{}-{}{}{}", pkg.name(), pkg.version(), path::MAIN_SEPARATOR, relative); @@ -228,38 +228,38 @@ fn tar(ws: &Workspace, // unpack the selectors 0.4.0 crate on crates.io. 
Either that or take a // look at rust-lang/cargo#2326 let mut header = Header::new_ustar(); - let metadata = try!(file.metadata().chain_error(|| { + let metadata = file.metadata().chain_error(|| { human(format!("could not learn metadata for: `{}`", relative)) - })); - try!(header.set_path(&path).chain_error(|| { + })?; + header.set_path(&path).chain_error(|| { human(format!("failed to add to archive: `{}`", relative)) - })); + })?; header.set_metadata(&metadata); header.set_cksum(); - try!(ar.append(&header, &mut file).chain_error(|| { + ar.append(&header, &mut file).chain_error(|| { internal(format!("could not archive source file `{}`", relative)) - })); + })?; } - let encoder = try!(ar.into_inner()); - try!(encoder.finish()); + let encoder = ar.into_inner()?; + encoder.finish()?; Ok(()) } fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()> { let config = ws.config(); - let pkg = try!(ws.current()); + let pkg = ws.current()?; - try!(config.shell().status("Verifying", pkg)); + config.shell().status("Verifying", pkg)?; - let f = try!(GzDecoder::new(tar)); + let f = GzDecoder::new(tar)?; let dst = pkg.root().join(&format!("target/package/{}-{}", pkg.name(), pkg.version())); if fs::metadata(&dst).is_ok() { - try!(fs::remove_dir_all(&dst)); + fs::remove_dir_all(&dst)?; } let mut archive = Archive::new(f); - try!(archive.unpack(dst.parent().unwrap())); + archive.unpack(dst.parent().unwrap())?; let manifest_path = dst.join("Cargo.toml"); // When packages are uploaded to a registry, all path dependencies are @@ -270,10 +270,10 @@ fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()> // location that the package was originally read from. In locking the // `SourceId` we're telling it that the corresponding `PathSource` will be // considered updated and we won't actually read any packages. - let cratesio = try!(SourceId::crates_io(config)); + let cratesio = SourceId::crates_io(config)?; let precise = Some("locked".to_string()); - let new_src = try!(SourceId::for_path(&dst)).with_precise(precise); - let new_pkgid = try!(PackageId::new(pkg.name(), pkg.version(), &new_src)); + let new_src = SourceId::for_path(&dst)?.with_precise(precise); + let new_pkgid = PackageId::new(pkg.name(), pkg.version(), &new_src)?; let new_summary = pkg.summary().clone().map_dependencies(|d| { if !d.source_id().is_path() { return d } d.clone_inner().set_source_id(cratesio.clone()).into_dependency() @@ -283,8 +283,8 @@ fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()> let new_pkg = Package::new(new_manifest, &manifest_path); // Now that we've rewritten all our path dependencies, compile it! 
- let ws = try!(Workspace::one(new_pkg, config, None)); - try!(ops::compile_ws(&ws, None, &ops::CompileOptions { + let ws = Workspace::one(new_pkg, config, None)?; + ops::compile_ws(&ws, None, &ops::CompileOptions { config: config, jobs: opts.jobs, target: None, @@ -298,7 +298,7 @@ fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()> mode: ops::CompileMode::Build, target_rustdoc_args: None, target_rustc_args: None, - })); + })?; Ok(()) } diff --git a/src/cargo/ops/cargo_pkgid.rs b/src/cargo/ops/cargo_pkgid.rs index 94737d507d4..0461bc4c87f 100644 --- a/src/cargo/ops/cargo_pkgid.rs +++ b/src/cargo/ops/cargo_pkgid.rs @@ -3,14 +3,14 @@ use core::{PackageIdSpec, Workspace}; use util::CargoResult; pub fn pkgid(ws: &Workspace, spec: Option<&str>) -> CargoResult { - let resolve = match try!(ops::load_pkg_lockfile(ws)) { + let resolve = match ops::load_pkg_lockfile(ws)? { Some(resolve) => resolve, None => bail!("a Cargo.lock must exist for this command"), }; let pkgid = match spec { - Some(spec) => try!(PackageIdSpec::query_str(spec, resolve.iter())), - None => try!(ws.current()).package_id(), + Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?, + None => ws.current()?.package_id(), }; Ok(PackageIdSpec::from_package_id(pkgid)) } diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs index 39028b798a2..5c25edbf1c9 100644 --- a/src/cargo/ops/cargo_read_manifest.rs +++ b/src/cargo/ops/cargo_read_manifest.rs @@ -11,7 +11,7 @@ use util::toml::Layout; pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config) -> CargoResult<(EitherManifest, Vec)> { trace!("read_package; path={}; source-id={}", path.display(), source_id); - let contents = try!(paths::read(path)); + let contents = paths::read(path)?; let layout = Layout::from_project_path(path.parent().unwrap()); let root = layout.root.clone(); @@ -24,7 +24,7 @@ pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config) pub fn read_package(path: &Path, source_id: &SourceId, config: &Config) -> CargoResult<(Package, Vec)> { trace!("read_package; path={}; source-id={}", path.display(), source_id); - let (manifest, nested) = try!(read_manifest(path, source_id, config)); + let (manifest, nested) = read_manifest(path, source_id, config)?; let manifest = match manifest { EitherManifest::Real(manifest) => manifest, EitherManifest::Virtual(..) => { @@ -43,7 +43,7 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) trace!("looking for root package: {}, source_id={}", path.display(), source_id); - try!(walk(path, &mut |dir| { + walk(path, &mut |dir| { trace!("looking for child package: {}", dir.display()); // Don't recurse into hidden/dot directories unless we're at the toplevel @@ -66,11 +66,11 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) } if has_manifest(dir) { - try!(read_nested_packages(dir, &mut all_packages, source_id, config, - &mut visited)); + read_nested_packages(dir, &mut all_packages, source_id, config, + &mut visited)?; } Ok(true) - })); + })?; if all_packages.is_empty() { Err(human(format!("Could not find Cargo.toml in `{}`", path.display()))) @@ -81,7 +81,7 @@ pub fn read_packages(path: &Path, source_id: &SourceId, config: &Config) fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult) -> CargoResult<()> { - if !try!(callback(path)) { + if !callback(path)? 
{ trace!("not processing {}", path.display()); return Ok(()) } @@ -100,9 +100,9 @@ fn walk(path: &Path, callback: &mut FnMut(&Path) -> CargoResult) } }; for dir in dirs { - let dir = try!(dir); - if try!(dir.file_type()).is_dir() { - try!(walk(&dir.path(), callback)); + let dir = dir?; + if dir.file_type()?.is_dir() { + walk(&dir.path(), callback)?; } } Ok(()) @@ -119,9 +119,9 @@ fn read_nested_packages(path: &Path, visited: &mut HashSet) -> CargoResult<()> { if !visited.insert(path.to_path_buf()) { return Ok(()) } - let manifest_path = try!(find_project_manifest_exact(path, "Cargo.toml")); + let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?; - let (manifest, nested) = try!(read_manifest(&manifest_path, source_id, config)); + let (manifest, nested) = read_manifest(&manifest_path, source_id, config)?; let manifest = match manifest { EitherManifest::Real(manifest) => manifest, EitherManifest::Virtual(..) => return Ok(()), @@ -147,8 +147,8 @@ fn read_nested_packages(path: &Path, if !source_id.is_registry() { for p in nested.iter() { let path = util::normalize_path(&path.join(p)); - try!(read_nested_packages(&path, all_packages, source_id, - config, visited)); + read_nested_packages(&path, all_packages, source_id, + config, visited)?; } } diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs index 897a3da5116..1c4c4a485c1 100644 --- a/src/cargo/ops/cargo_run.rs +++ b/src/cargo/ops/cargo_run.rs @@ -8,7 +8,7 @@ pub fn run(ws: &Workspace, options: &ops::CompileOptions, args: &[String]) -> CargoResult> { let config = ws.config(); - let root = try!(ws.current()); + let root = ws.current()?; let mut bins = root.manifest().targets().iter().filter(|a| { !a.is_lib() && !a.is_custom_build() && match options.filter { @@ -40,7 +40,7 @@ pub fn run(ws: &Workspace, } } - let compile = try!(ops::compile(ws, options)); + let compile = ops::compile(ws, options)?; let exe = &compile.binaries[0]; let exe = match util::without_prefix(&exe, config.cwd()) { Some(path) if path.file_name() == Some(path.as_os_str()) @@ -48,9 +48,9 @@ pub fn run(ws: &Workspace, Some(path) => path.to_path_buf(), None => exe.to_path_buf(), }; - let mut process = try!(compile.target_process(exe, &root)); + let mut process = compile.target_process(exe, &root)?; process.args(args).cwd(config.cwd()); - try!(config.shell().status("Running", process.to_string())); + config.shell().status("Running", process.to_string())?; Ok(process.exec_replace().err()) } diff --git a/src/cargo/ops/cargo_rustc/compilation.rs b/src/cargo/ops/cargo_rustc/compilation.rs index f15032c2336..afb8962589b 100644 --- a/src/cargo/ops/cargo_rustc/compilation.rs +++ b/src/cargo/ops/cargo_rustc/compilation.rs @@ -71,12 +71,12 @@ impl<'cfg> Compilation<'cfg> { /// See `process`. pub fn rustc_process(&self, pkg: &Package) -> CargoResult { - self.fill_env(try!(self.config.rustc()).process(), pkg, true) + self.fill_env(self.config.rustc()?.process(), pkg, true) } /// See `process`. pub fn rustdoc_process(&self, pkg: &Package) -> CargoResult { - self.fill_env(process(&*try!(self.config.rustdoc())), pkg, false) + self.fill_env(process(&*self.config.rustdoc()?), pkg, false) } /// See `process`. 
@@ -128,7 +128,7 @@ impl<'cfg> Compilation<'cfg> { }; search_path.extend(util::dylib_path().into_iter()); - let search_path = try!(join_paths(&search_path, util::dylib_path_envvar())); + let search_path = join_paths(&search_path, util::dylib_path_envvar())?; cmd.env(util::dylib_path_envvar(), &search_path); if let Some(env) = self.extra_env.get(pkg.package_id()) { diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index a1552d63654..d95b7ee75c6 100644 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -62,15 +62,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> { profiles: &'a Profiles) -> CargoResult> { let dest = if build_config.release { "release" } else { "debug" }; - let host_layout = try!(Layout::new(ws, None, &dest)); + let host_layout = Layout::new(ws, None, &dest)?; let target_layout = match build_config.requested_target.as_ref() { Some(target) => { - Some(try!(Layout::new(ws, Some(&target), &dest))) + Some(Layout::new(ws, Some(&target), &dest)?) } None => None, }; - let current_package = try!(ws.current()).package_id().clone(); + let current_package = ws.current()?.package_id().clone(); Ok(Context { host: host_layout, target: target_layout, @@ -98,14 +98,14 @@ impl<'a, 'cfg> Context<'a, 'cfg> { pub fn prepare(&mut self) -> CargoResult<()> { let _p = profile::start("preparing layout"); - try!(self.host.prepare().chain_error(|| { + self.host.prepare().chain_error(|| { internal(format!("couldn't prepare build directories")) - })); + })?; match self.target { Some(ref mut target) => { - try!(target.prepare().chain_error(|| { + target.prepare().chain_error(|| { internal(format!("couldn't prepare build directories")) - })); + })?; } None => {} } @@ -128,13 +128,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> { crate_types.insert("bin".to_string()); crate_types.insert("rlib".to_string()); for unit in units { - try!(self.visit_crate_type(unit, &mut crate_types)); + self.visit_crate_type(unit, &mut crate_types)?; } - try!(self.probe_target_info_kind(&crate_types, Kind::Target)); + self.probe_target_info_kind(&crate_types, Kind::Target)?; if self.requested_target().is_none() { self.host_info = self.target_info.clone(); } else { - try!(self.probe_target_info_kind(&crate_types, Kind::Host)); + self.probe_target_info_kind(&crate_types, Kind::Host)?; } Ok(()) } @@ -152,8 +152,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } })); } - for dep in try!(self.dep_targets(&unit)) { - try!(self.visit_crate_type(&dep, crate_types)); + for dep in self.dep_targets(&unit)? 
{ + self.visit_crate_type(&dep, crate_types)?; } Ok(()) } @@ -162,11 +162,11 @@ impl<'a, 'cfg> Context<'a, 'cfg> { crate_types: &BTreeSet, kind: Kind) -> CargoResult<()> { - let rustflags = try!(env_args(self.config, + let rustflags = env_args(self.config, &self.build_config, kind, - "RUSTFLAGS")); - let mut process = try!(self.config.rustc()).process(); + "RUSTFLAGS")?; + let mut process = self.config.rustc()?.process(); process.arg("-") .arg("--crate-name").arg("_") .arg("--print=file-names") @@ -184,13 +184,13 @@ impl<'a, 'cfg> Context<'a, 'cfg> { with_cfg.arg("--print=cfg"); let mut has_cfg = true; - let output = try!(with_cfg.exec_with_output().or_else(|_| { + let output = with_cfg.exec_with_output().or_else(|_| { has_cfg = false; process.exec_with_output() }).chain_error(|| { human(format!("failed to run `rustc` to learn about \ target-specific information")) - })); + })?; let error = str::from_utf8(&output.stderr).unwrap(); let output = str::from_utf8(&output.stdout).unwrap(); @@ -245,9 +245,9 @@ impl<'a, 'cfg> Context<'a, 'cfg> { -> CargoResult<()> { let mut visited = HashSet::new(); for unit in units { - try!(self.walk_used_in_plugin_map(unit, + self.walk_used_in_plugin_map(unit, unit.target.for_host(), - &mut visited)); + &mut visited)?; } Ok(()) } @@ -263,10 +263,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> { if is_plugin { self.used_in_plugin.insert(*unit); } - for unit in try!(self.dep_targets(unit)) { - try!(self.walk_used_in_plugin_map(&unit, + for unit in self.dep_targets(unit)? { + self.walk_used_in_plugin_map(&unit, is_plugin || unit.target.for_host(), - visited)); + visited)?; } Ok(()) } @@ -483,14 +483,14 @@ impl<'a, 'cfg> Context<'a, 'cfg> { TargetKind::CustomBuild | TargetKind::Bench | TargetKind::Test => { - try!(add("bin", false)); + add("bin", false)?; } TargetKind::Lib(..) if unit.profile.test => { - try!(add("bin", false)); + add("bin", false)?; } TargetKind::Lib(ref libs) => { for lib in libs { - try!(add(lib.crate_type(), lib.linkable())); + add(lib.crate_type(), lib.linkable())?; } } } @@ -520,7 +520,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { let id = unit.pkg.package_id(); let deps = self.resolve.deps(id); - let mut ret = try!(deps.filter(|dep| { + let mut ret = deps.filter(|dep| { unit.pkg.dependencies().iter().filter(|d| { d.name() == dep.name() && d.version_req().matches(dep.version()) }).any(|d| { @@ -571,7 +571,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { } Err(e) => Some(Err(e)) } - }).collect::>>()); + }).collect::>>()?; // If this target is a build script, then what we've collected so far is // all we need. If this isn't a build script, then it depends on the @@ -632,7 +632,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { profile: &self.profiles.dev, ..*unit }; - let deps = try!(self.dep_targets(&tmp)); + let deps = self.dep_targets(&tmp)?; Ok(deps.iter().filter_map(|unit| { if !unit.target.linkable() || unit.pkg.manifest().links().is_none() { return None @@ -666,7 +666,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // the documentation of the library being built. let mut ret = Vec::new(); for dep in deps { - let dep = try!(dep); + let dep = dep?; let lib = match dep.targets().iter().find(|t| t.is_lib()) { Some(lib) => lib, None => continue, @@ -856,14 +856,14 @@ fn env_args(config: &Config, // Then the target.*.rustflags value let target = build_config.requested_target.as_ref().unwrap_or(&build_config.host_triple); let key = format!("target.{}.{}", target, name); - if let Some(args) = try!(config.get_list(&key)) { + if let Some(args) = config.get_list(&key)? 
{ let args = args.val.into_iter().map(|a| a.0); return Ok(args.collect()); } // Then the build.rustflags value let key = format!("build.{}", name); - if let Some(args) = try!(config.get_list(&key)) { + if let Some(args) = config.get_list(&key)? { let args = args.val.into_iter().map(|a| a.0); return Ok(args.collect()); } diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs index 54c688134be..375a6ea434c 100644 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -69,13 +69,13 @@ pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) let (work_dirty, work_fresh) = if overridden { (Work::new(|_| Ok(())), Work::new(|_| Ok(()))) } else { - try!(build_work(cx, unit)) + build_work(cx, unit)? }; // Now that we've prep'd our work, build the work needed to manage the // fingerprint and then start returning that upwards. let (freshness, dirty, fresh) = - try!(fingerprint::prepare_build_cmd(cx, unit)); + fingerprint::prepare_build_cmd(cx, unit)?; Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness)) } @@ -97,7 +97,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // package's library profile. let profile = cx.lib_profile(unit.pkg.package_id()); let to_exec = to_exec.into_os_string(); - let mut cmd = try!(cx.compilation.host_process(to_exec, unit.pkg)); + let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?; cmd.env("OUT_DIR", &build_output) .env("CARGO_MANIFEST_DIR", unit.pkg.root()) .env("NUM_JOBS", &cx.jobs().to_string()) @@ -109,8 +109,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) .env("OPT_LEVEL", &profile.opt_level) .env("PROFILE", if cx.build_config.release { "release" } else { "debug" }) .env("HOST", cx.host_triple()) - .env("RUSTC", &try!(cx.config.rustc()).path) - .env("RUSTDOC", &*try!(cx.config.rustdoc())); + .env("RUSTC", &cx.config.rustc()?.path) + .env("RUSTDOC", &*cx.config.rustdoc()?); if let Some(links) = unit.pkg.manifest().links() { cmd.env("CARGO_MANIFEST_LINKS", links); @@ -150,7 +150,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // This information will be used at build-time later on to figure out which // sorts of variables need to be discovered at that time. let lib_deps = { - try!(cx.dep_run_custom_build(unit)).iter().filter_map(|unit| { + cx.dep_run_custom_build(unit)?.iter().filter_map(|unit| { if unit.profile.run_custom_build { Some((unit.pkg.manifest().links().unwrap().to_string(), unit.pkg.package_id().clone())) @@ -177,8 +177,8 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) }; cx.build_explicit_deps.insert(*unit, (output_file.clone(), rerun_if_changed)); - try!(fs::create_dir_all(&cx.layout(&host_unit).build(unit.pkg))); - try!(fs::create_dir_all(&cx.layout(unit).build(unit.pkg))); + fs::create_dir_all(&cx.layout(&host_unit).build(unit.pkg))?; + fs::create_dir_all(&cx.layout(unit).build(unit.pkg))?; // Prepare the unit of "dirty work" which will actually run the custom build // command. @@ -191,10 +191,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // If we have an old build directory, then just move it into place, // otherwise create it! 
if fs::metadata(&build_output).is_err() { - try!(fs::create_dir(&build_output).chain_error(|| { + fs::create_dir(&build_output).chain_error(|| { internal("failed to create script output directory for \ build command") - })); + })?; } // For all our native lib dependencies, pick up their metadata to pass @@ -205,10 +205,10 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) let build_state = build_state.outputs.lock().unwrap(); for (name, id) in lib_deps { let key = (id.clone(), kind); - let state = try!(build_state.get(&key).chain_error(|| { + let state = build_state.get(&key).chain_error(|| { internal(format!("failed to locate build state for env \ vars: {}/{:?}", id, kind)) - })); + })?; let data = &state.metadata; for &(ref key, ref value) in data.iter() { cmd.env(&format!("DEP_{}_{}", super::envify(&name), @@ -216,22 +216,22 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) } } if let Some(build_scripts) = build_scripts { - try!(super::add_plugin_deps(&mut cmd, &build_state, - &build_scripts)); + super::add_plugin_deps(&mut cmd, &build_state, + &build_scripts)?; } } // And now finally, run the build command itself! state.running(&cmd); - let output = try!(cmd.exec_with_streaming( + let output = cmd.exec_with_streaming( &mut |out_line| { state.stdout(out_line); Ok(()) }, &mut |err_line| { state.stderr(err_line); Ok(()) }, ).map_err(|mut e| { e.desc = format!("failed to run custom build command for `{}`\n{}", pkg_name, e.desc); Human(e) - })); - try!(paths::write(&output_file, &output.stdout)); + })?; + paths::write(&output_file, &output.stdout)?; // After the build command has finished running, we need to be sure to // remember all of its output so we can later discover precisely what it @@ -240,7 +240,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // This is also the location where we provide feedback into the build // state informing what variables were discovered via our script as // well. 
- let parsed_output = try!(BuildOutput::parse(&output.stdout, &pkg_name)); + let parsed_output = BuildOutput::parse(&output.stdout, &pkg_name)?; build_state.insert(id, kind, parsed_output); Ok(()) }); @@ -252,7 +252,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) let (id, pkg_name, build_state, output_file) = all; let output = match prev_output { Some(output) => output, - None => try!(BuildOutput::parse_file(&output_file, &pkg_name)), + None => BuildOutput::parse_file(&output_file, &pkg_name)?, }; build_state.insert(id, kind, output); Ok(()) @@ -294,7 +294,7 @@ impl BuildState { impl BuildOutput { pub fn parse_file(path: &Path, pkg_name: &str) -> CargoResult { - let contents = try!(paths::read_bytes(path)); + let contents = paths::read_bytes(path)?; BuildOutput::parse(&contents, pkg_name) } @@ -336,9 +336,9 @@ impl BuildOutput { match key { "rustc-flags" => { - let (libs, links) = try!( + let (libs, links) = BuildOutput::parse_rustc_flags(value, &whence) - ); + ?; library_links.extend(links.into_iter()); library_paths.extend(libs.into_iter()); } @@ -407,7 +407,7 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, -> CargoResult<()> { let mut ret = HashMap::new(); for unit in units { - try!(build(&mut ret, cx, unit)); + build(&mut ret, cx, unit)?; } cx.build_scripts.extend(ret.into_iter().map(|(k, v)| { (k, Arc::new(v)) @@ -431,8 +431,8 @@ pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { add_to_link(&mut ret, unit.pkg.package_id(), unit.kind); } - for unit in try!(cx.dep_targets(unit)).iter() { - let dep_scripts = try!(build(out, cx, unit)); + for unit in cx.dep_targets(unit)?.iter() { + let dep_scripts = build(out, cx, unit)?; if unit.target.for_host() { ret.plugins.extend(dep_scripts.to_link.iter() diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index 1f785609fd5..6cc8f88c105 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -53,7 +53,7 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, debug!("fingerprint at: {}", loc.display()); - let fingerprint = try!(calculate(cx, unit)); + let fingerprint = calculate(cx, unit)?; let compare = compare_old_fingerprint(&loc, &*fingerprint); log_compare(unit, &compare); @@ -70,10 +70,10 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, if compare.is_err() { let source_id = unit.pkg.package_id().source_id(); let sources = cx.packages.sources(); - let source = try!(sources.get(source_id).chain_error(|| { + let source = sources.get(source_id).chain_error(|| { internal("missing package source") - })); - try!(source.verify(unit.pkg.package_id())); + })?; + source.verify(unit.pkg.package_id())?; } let root = cx.out_dir(unit); @@ -82,7 +82,7 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, missing_outputs = !root.join(unit.target.crate_name()) .join("index.html").exists(); } else { - for (src, link_dst, _) in try!(cx.target_filenames(unit)) { + for (src, link_dst, _) in cx.target_filenames(unit)? 
{ missing_outputs |= !src.exists(); if let Some(link_dst) = link_dst { missing_outputs |= !link_dst.exists(); @@ -148,9 +148,9 @@ impl Fingerprint { fn update_local(&self) -> CargoResult<()> { match self.local { LocalFingerprint::MtimeBased(ref slot, ref path) => { - let meta = try!(fs::metadata(path).chain_error(|| { + let meta = fs::metadata(path).chain_error(|| { internal(format!("failed to stat `{}`", path.display())) - })); + })?; let mtime = FileTime::from_last_modification_time(&meta); *slot.0.lock().unwrap() = Some(mtime); } @@ -245,19 +245,19 @@ impl hash::Hash for Fingerprint { impl Encodable for Fingerprint { fn encode(&self, e: &mut E) -> Result<(), E::Error> { e.emit_struct("Fingerprint", 6, |e| { - try!(e.emit_struct_field("rustc", 0, |e| self.rustc.encode(e))); - try!(e.emit_struct_field("target", 1, |e| self.target.encode(e))); - try!(e.emit_struct_field("profile", 2, |e| self.profile.encode(e))); - try!(e.emit_struct_field("local", 3, |e| self.local.encode(e))); - try!(e.emit_struct_field("features", 4, |e| { + e.emit_struct_field("rustc", 0, |e| self.rustc.encode(e))?; + e.emit_struct_field("target", 1, |e| self.target.encode(e))?; + e.emit_struct_field("profile", 2, |e| self.profile.encode(e))?; + e.emit_struct_field("local", 3, |e| self.local.encode(e))?; + e.emit_struct_field("features", 4, |e| { self.features.encode(e) - })); - try!(e.emit_struct_field("deps", 5, |e| { + })?; + e.emit_struct_field("deps", 5, |e| { self.deps.iter().map(|&(ref a, ref b)| { (a, b.hash()) }).collect::>().encode(e) - })); - try!(e.emit_struct_field("rustflags", 6, |e| self.rustflags.encode(e))); + })?; + e.emit_struct_field("rustflags", 6, |e| self.rustflags.encode(e))?; Ok(()) }) } @@ -270,15 +270,15 @@ impl Decodable for Fingerprint { } d.read_struct("Fingerprint", 6, |d| { Ok(Fingerprint { - rustc: try!(d.read_struct_field("rustc", 0, decode)), - target: try!(d.read_struct_field("target", 1, decode)), - profile: try!(d.read_struct_field("profile", 2, decode)), - local: try!(d.read_struct_field("local", 3, decode)), - features: try!(d.read_struct_field("features", 4, decode)), + rustc: d.read_struct_field("rustc", 0, decode)?, + target: d.read_struct_field("target", 1, decode)?, + profile: d.read_struct_field("profile", 2, decode)?, + local: d.read_struct_field("local", 3, decode)?, + features: d.read_struct_field("features", 4, decode)?, memoized_hash: Mutex::new(None), deps: { let decode = decode::, D>; - let v = try!(d.read_struct_field("deps", 5, decode)); + let v = d.read_struct_field("deps", 5, decode)?; v.into_iter().map(|(name, hash)| { (name, Arc::new(Fingerprint { rustc: 0, @@ -292,7 +292,7 @@ impl Decodable for Fingerprint { })) }).collect() }, - rustflags: try!(d.read_struct_field("rustflags", 6, decode)), + rustflags: d.read_struct_field("rustflags", 6, decode)?, }) }) } @@ -314,7 +314,7 @@ impl Encodable for MtimeSlot { impl Decodable for MtimeSlot { fn decode(e: &mut D) -> Result { - let kind: Option<(u64, u32)> = try!(Decodable::decode(e)); + let kind: Option<(u64, u32)> = Decodable::decode(e)?; Ok(MtimeSlot(Mutex::new(kind.map(|(s, n)| { FileTime::from_seconds_since_1970(s, n) })))) @@ -356,33 +356,33 @@ fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) // elsewhere. Also skip fingerprints of binaries because they don't actually // induce a recompile, they're just dependencies in the sense that they need // to be built. 
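The most common shape in these fingerprint hunks is `expr.chain_error(|| internal(...))?`: context is attached to the error first, then `?` propagates it. `chain_error` is Cargo's internal helper; a rough standalone equivalent using plain `map_err` instead (illustrative only, not Cargo's API):

    use std::fs;
    use std::io;
    use std::path::Path;
    use std::time::SystemTime;

    // Attach context to the error, then let `?` do the early return; the
    // closure runs before the error is propagated.
    fn mtime_of(path: &Path) -> Result<SystemTime, String> {
        let meta = fs::metadata(path)
            .map_err(|e: io::Error| format!("failed to stat `{}`: {}", path.display(), e))?;
        meta.modified()
            .map_err(|e| format!("failed to read mtime of `{}`: {}", path.display(), e))
    }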
- let deps = try!(cx.dep_targets(unit)); - let deps = try!(deps.iter().filter(|u| { + let deps = cx.dep_targets(unit)?; + let deps = deps.iter().filter(|u| { !u.target.is_custom_build() && !u.target.is_bin() }).map(|unit| { calculate(cx, unit).map(|fingerprint| { (unit.pkg.package_id().to_string(), fingerprint) }) - }).collect::>>()); + }).collect::>>()?; // And finally, calculate what our own local fingerprint is let local = if use_dep_info(unit) { let dep_info = dep_info_loc(cx, unit); - let mtime = try!(dep_info_mtime_if_fresh(&dep_info)); + let mtime = dep_info_mtime_if_fresh(&dep_info)?; LocalFingerprint::MtimeBased(MtimeSlot(Mutex::new(mtime)), dep_info) } else { - let fingerprint = try!(pkg_fingerprint(cx, unit.pkg)); + let fingerprint = pkg_fingerprint(cx, unit.pkg)?; LocalFingerprint::Precalculated(fingerprint) }; let mut deps = deps; deps.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); let extra_flags = if unit.profile.doc { - try!(cx.rustdocflags_args(unit)) + cx.rustdocflags_args(unit)? } else { - try!(cx.rustflags_args(unit)) + cx.rustflags_args(unit)? }; let fingerprint = Arc::new(Fingerprint { - rustc: util::hash_u64(&try!(cx.config.rustc()).verbose_version), + rustc: util::hash_u64(&cx.config.rustc()?.verbose_version), target: util::hash_u64(&unit.target), profile: util::hash_u64(&unit.profile), features: format!("{:?}", features), @@ -447,7 +447,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) let &(ref output, ref deps) = &cx.build_explicit_deps[unit]; let local = if deps.is_empty() { - let s = try!(pkg_fingerprint(cx, unit.pkg)); + let s = pkg_fingerprint(cx, unit.pkg)?; LocalFingerprint::Precalculated(s) } else { let deps = deps.iter().map(|p| unit.pkg.root().join(p)); @@ -493,7 +493,7 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) let slot = MtimeSlot(Mutex::new(None)); fingerprint.local = LocalFingerprint::MtimeBased(slot, output_path); - try!(fingerprint.update_local()); + fingerprint.update_local()?; } } write_fingerprint(&loc, &fingerprint) @@ -505,9 +505,9 @@ pub fn prepare_build_cmd<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> { let hash = fingerprint.hash(); debug!("write fingerprint: {}", loc.display()); - try!(paths::write(&loc, util::to_hex(hash).as_bytes())); - try!(paths::write(&loc.with_extension("json"), - json::encode(&fingerprint).unwrap().as_bytes())); + paths::write(&loc, util::to_hex(hash).as_bytes())?; + paths::write(&loc.with_extension("json"), + json::encode(&fingerprint).unwrap().as_bytes())?; Ok(()) } @@ -517,10 +517,10 @@ pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> { let new2 = new1.clone(); if fs::metadata(&new1).is_err() { - try!(fs::create_dir(&new1)); + fs::create_dir(&new1)?; } if fs::metadata(&new2).is_err() { - try!(fs::create_dir(&new2)); + fs::create_dir(&new2)?; } Ok(()) } @@ -537,17 +537,17 @@ pub fn dep_info_loc(cx: &Context, unit: &Unit) -> PathBuf { fn compare_old_fingerprint(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<()> { - let old_fingerprint_short = try!(paths::read(loc)); + let old_fingerprint_short = paths::read(loc)?; let new_hash = new_fingerprint.hash(); if util::to_hex(new_hash) == old_fingerprint_short { return Ok(()) } - let old_fingerprint_json = try!(paths::read(&loc.with_extension("json"))); - let old_fingerprint = try!(json::decode(&old_fingerprint_json).chain_error(|| { + let old_fingerprint_json = 
paths::read(&loc.with_extension("json"))?; + let old_fingerprint = json::decode(&old_fingerprint_json).chain_error(|| { internal(format!("failed to deserialize json")) - })); + })?; new_fingerprint.compare(&old_fingerprint) } @@ -578,15 +578,15 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult> { if fs_try!(f.read_until(0, &mut cwd)) == 0 { return Ok(None) } - let cwd = try!(util::bytes2path(&cwd[..cwd.len()-1])); + let cwd = util::bytes2path(&cwd[..cwd.len()-1])?; let line = match f.lines().next() { Some(Ok(line)) => line, _ => return Ok(None), }; - let pos = try!(line.find(": ").chain_error(|| { + let pos = line.find(": ").chain_error(|| { internal(format!("dep-info not in an understood format: {}", dep_info.display())) - })); + })?; let deps = &line[pos + 2..]; let mut paths = Vec::new(); @@ -599,9 +599,9 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult> { while file.ends_with("\\") { file.pop(); file.push(' '); - file.push_str(try!(deps.next().chain_error(|| { + file.push_str(deps.next().chain_error(|| { internal(format!("malformed dep-info format, trailing \\")) - }))); + })?); } paths.push(cwd.join(&file)); } @@ -612,9 +612,9 @@ fn dep_info_mtime_if_fresh(dep_info: &Path) -> CargoResult> { fn pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult { let source_id = pkg.package_id().source_id(); let sources = cx.packages.sources(); - let source = try!(sources.get(source_id).chain_error(|| { + let source = sources.get(source_id).chain_error(|| { internal("missing package source") - })); + })?; source.fingerprint(pkg) } @@ -683,12 +683,12 @@ fn filename(cx: &Context, unit: &Unit) -> String { // next time. pub fn append_current_dir(path: &Path, cwd: &Path) -> CargoResult<()> { debug!("appending {} <- {}", path.display(), cwd.display()); - let mut f = try!(OpenOptions::new().read(true).write(true).open(path)); + let mut f = OpenOptions::new().read(true).write(true).open(path)?; let mut contents = Vec::new(); - try!(f.read_to_end(&mut contents)); - try!(f.seek(SeekFrom::Start(0))); - try!(f.write_all(try!(util::path2bytes(cwd)))); - try!(f.write_all(&[0])); - try!(f.write_all(&contents)); + f.read_to_end(&mut contents)?; + f.seek(SeekFrom::Start(0))?; + f.write_all(util::path2bytes(cwd)?)?; + f.write_all(&[0])?; + f.write_all(&contents)?; Ok(()) } diff --git a/src/cargo/ops/cargo_rustc/job.rs b/src/cargo/ops/cargo_rustc/job.rs index ae7ba303738..219a6d43749 100644 --- a/src/cargo/ops/cargo_rustc/job.rs +++ b/src/cargo/ops/cargo_rustc/job.rs @@ -38,7 +38,7 @@ impl Work { pub fn then(self, next: Work) -> Work { Work::new(move |state| { - try!(self.call(state)); + self.call(state)?; next.call(state) }) } diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs index 39d1c45a785..e69bd33cd91 100644 --- a/src/cargo/ops/cargo_rustc/job_queue.rs +++ b/src/cargo/ops/cargo_rustc/job_queue.rs @@ -100,7 +100,7 @@ impl<'a> JobQueue<'a> { job: Job, fresh: Freshness) -> CargoResult<()> { let key = Key::new(unit); - let deps = try!(key.dependencies(cx)); + let deps = key.dependencies(cx)?; self.queue.queue(Fresh, key, Vec::new(), &deps).push((job, fresh)); *self.counts.entry(key.pkg).or_insert(0) += 1; Ok(()) @@ -141,7 +141,7 @@ impl<'a> JobQueue<'a> { while error.is_none() && self.active < self.jobs { if !queue.is_empty() { let (key, job, fresh) = queue.remove(0); - try!(self.run(key, fresh, job, cx.config, scope)); + self.run(key, fresh, job, cx.config, scope)?; } else if let Some((fresh, key, jobs)) = self.queue.dequeue() { let 
total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| { f.combine(fresh) @@ -165,28 +165,28 @@ impl<'a> JobQueue<'a> { match msg { Message::Run(cmd) => { - try!(cx.config.shell().verbose(|c| c.status("Running", &cmd))); + cx.config.shell().verbose(|c| c.status("Running", &cmd))?; } Message::Stdout(out) => { if cx.config.extra_verbose() { - try!(writeln!(cx.config.shell().out(), "{}", out)); + writeln!(cx.config.shell().out(), "{}", out)?; } } Message::Stderr(err) => { if cx.config.extra_verbose() { - try!(writeln!(cx.config.shell().err(), "{}", err)); + writeln!(cx.config.shell().err(), "{}", err)?; } } Message::Finish(result) => { info!("end: {:?}", key); self.active -= 1; match result { - Ok(()) => try!(self.finish(key, cx)), + Ok(()) => self.finish(key, cx)?, Err(e) => { if self.active > 0 { - try!(cx.config.shell().say( + cx.config.shell().say( "Build failed, waiting for other \ - jobs to finish...", YELLOW)); + jobs to finish...", YELLOW)?; } if error.is_none() { error = Some(e); @@ -210,10 +210,10 @@ impl<'a> JobQueue<'a> { duration.subsec_nanos() / 10000000); if self.queue.is_empty() { if !self.is_doc_all { - try!(cx.config.shell().status("Finished", format!("{} [{}] target(s) in {}", + cx.config.shell().status("Finished", format!("{} [{}] target(s) in {}", build_type, opt_type, - time_elapsed))); + time_elapsed))?; } Ok(()) } else if let Some(e) = error { @@ -247,7 +247,7 @@ impl<'a> JobQueue<'a> { }); // Print out some nice progress information - try!(self.note_working_on(config, &key, fresh)); + self.note_working_on(config, &key, fresh)?; Ok(()) } @@ -257,7 +257,7 @@ impl<'a> JobQueue<'a> { let output = cx.build_state.outputs.lock().unwrap(); if let Some(output) = output.get(&(key.pkg.clone(), key.kind)) { for warning in output.warnings.iter() { - try!(cx.config.shell().warn(warning)); + cx.config.shell().warn(warning)?; } } } @@ -293,15 +293,15 @@ impl<'a> JobQueue<'a> { Dirty => { if key.profile.doc { self.documented.insert(key.pkg); - try!(config.shell().status("Documenting", key.pkg)); + config.shell().status("Documenting", key.pkg)?; } else { self.compiled.insert(key.pkg); - try!(config.shell().status("Compiling", key.pkg)); + config.shell().status("Compiling", key.pkg)?; } } Fresh if self.counts[key.pkg] == 0 => { self.compiled.insert(key.pkg); - try!(config.shell().verbose(|c| c.status("Fresh", key.pkg))); + config.shell().verbose(|c| c.status("Fresh", key.pkg))?; } Fresh => {} } @@ -322,12 +322,12 @@ impl<'a> Key<'a> { fn dependencies<'cfg>(&self, cx: &Context<'a, 'cfg>) -> CargoResult>> { let unit = Unit { - pkg: try!(cx.get_package(self.pkg)), + pkg: cx.get_package(self.pkg)?, target: self.target, profile: self.profile, kind: self.kind, }; - let targets = try!(cx.dep_targets(&unit)); + let targets = cx.dep_targets(&unit)?; Ok(targets.iter().filter_map(|unit| { // Binaries aren't actually needed to *compile* tests, just to run // them, so we don't include this dependency edge in the job graph. diff --git a/src/cargo/ops/cargo_rustc/layout.rs b/src/cargo/ops/cargo_rustc/layout.rs index 393b3defd08..cf0cec2df02 100644 --- a/src/cargo/ops/cargo_rustc/layout.rs +++ b/src/cargo/ops/cargo_rustc/layout.rs @@ -78,8 +78,7 @@ impl Layout { // the target triple as a Path and then just use the file stem as the // component for the directory name. 
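On the file-stem trick the next hunk performs: `Path::file_stem` leaves an ordinary triple untouched but strips the extension when the "triple" is actually a path to a custom target spec such as `my-target.json`, which is presumably why the directory component is derived this way. A small sketch:

    use std::path::Path;

    // Returns the component used for the target directory name.
    fn dir_component(triple: &str) -> Option<&str> {
        Path::new(triple).file_stem().and_then(|s| s.to_str())
    }

    // dir_component("x86_64-unknown-linux-gnu") == Some("x86_64-unknown-linux-gnu")
    // dir_component("my-target.json")           == Some("my-target")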
if let Some(triple) = triple { - path.push(try!(Path::new(triple).file_stem() - .ok_or(human(format!("target was empty"))))); + path.push(Path::new(triple).file_stem().ok_or(human(format!("target was empty")))?); } path.push(dest); Layout::at(ws.config(), path) @@ -89,7 +88,7 @@ impl Layout { // For now we don't do any more finer-grained locking on the artifact // directory, so just lock the entire thing for the duration of this // compile. - let lock = try!(root.open_rw(".cargo-lock", config, "build directory")); + let lock = root.open_rw(".cargo-lock", config, "build directory")?; let root = root.into_path_unlocked(); Ok(Layout { @@ -105,20 +104,20 @@ impl Layout { pub fn prepare(&mut self) -> io::Result<()> { if fs::metadata(&self.root).is_err() { - try!(fs::create_dir_all(&self.root)); + fs::create_dir_all(&self.root)?; } - try!(mkdir(&self.deps)); - try!(mkdir(&self.native)); - try!(mkdir(&self.fingerprint)); - try!(mkdir(&self.examples)); - try!(mkdir(&self.build)); + mkdir(&self.deps)?; + mkdir(&self.native)?; + mkdir(&self.fingerprint)?; + mkdir(&self.examples)?; + mkdir(&self.build)?; return Ok(()); fn mkdir(dir: &Path) -> io::Result<()> { if fs::metadata(&dir).is_err() { - try!(fs::create_dir(dir)); + fs::create_dir(dir)?; } Ok(()) } diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index 435f28f5c2e..02e1088303b 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -81,15 +81,15 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, }) }).collect::>(); - let mut cx = try!(Context::new(ws, resolve, packages, config, - build_config, profiles)); + let mut cx = Context::new(ws, resolve, packages, config, + build_config, profiles)?; let mut queue = JobQueue::new(&cx); - try!(cx.prepare()); - try!(cx.probe_target_info(&units)); - try!(cx.build_used_in_plugin_map(&units)); - try!(custom_build::build_map(&mut cx, &units)); + cx.prepare()?; + cx.probe_target_info(&units)?; + cx.build_used_in_plugin_map(&units)?; + custom_build::build_map(&mut cx, &units)?; for unit in units.iter() { // Build up a list of pending jobs, each of which represent @@ -97,11 +97,11 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, // part of this, that's all done next as part of the `execute` // function which will run everything in order with proper // parallelism. - try!(compile(&mut cx, &mut queue, unit)); + compile(&mut cx, &mut queue, unit)?; } // Now that we've figured out everything that we're going to do, do it! - try!(queue.execute(&mut cx)); + queue.execute(&mut cx)?; for unit in units.iter() { let out_dir = cx.layout(unit).build_out(unit.pkg) @@ -110,7 +110,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, .or_insert(Vec::new()) .push(("OUT_DIR".to_string(), out_dir)); - for (dst, link_dst, _linkable) in try!(cx.target_filenames(unit)) { + for (dst, link_dst, _linkable) in cx.target_filenames(unit)? 
{ let bindst = match link_dst { Some(link_dst) => link_dst, None => dst.clone(), @@ -130,7 +130,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, if !unit.target.is_lib() { continue } // Include immediate lib deps as well - for unit in try!(cx.dep_targets(unit)).iter() { + for unit in cx.dep_targets(unit)?.iter() { let pkgid = unit.pkg.package_id(); if !unit.target.is_lib() { continue } if unit.profile.doc { continue } @@ -138,7 +138,7 @@ pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>, continue } - let v = try!(cx.target_filenames(unit)); + let v = cx.target_filenames(unit)?; let v = v.into_iter().map(|(f, _, _)| { (unit.target.clone(), f) }).collect::>(); @@ -177,43 +177,43 @@ fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>, // we've got everything constructed. let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); - try!(fingerprint::prepare_init(cx, unit)); - try!(cx.links.validate(unit)); + fingerprint::prepare_init(cx, unit)?; + cx.links.validate(unit)?; let (dirty, fresh, freshness) = if unit.profile.run_custom_build { - try!(custom_build::prepare(cx, unit)) + custom_build::prepare(cx, unit)? } else { - let (freshness, dirty, fresh) = try!(fingerprint::prepare_target(cx, - unit)); + let (freshness, dirty, fresh) = fingerprint::prepare_target(cx, + unit)?; let work = if unit.profile.doc { - try!(rustdoc(cx, unit)) + rustdoc(cx, unit)? } else { - try!(rustc(cx, unit)) + rustc(cx, unit)? }; - let link_work1 = try!(link_targets(cx, unit)); - let link_work2 = try!(link_targets(cx, unit)); + let link_work1 = link_targets(cx, unit)?; + let link_work2 = link_targets(cx, unit)?; // Need to link targets on both the dirty and fresh let dirty = work.then(link_work1).then(dirty); let fresh = link_work2.then(fresh); (dirty, fresh, freshness) }; - try!(jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)); + jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness)?; drop(p); // Be sure to compile all dependencies of this target as well. - for unit in try!(cx.dep_targets(unit)).iter() { - try!(compile(cx, jobs, unit)); + for unit in cx.dep_targets(unit)?.iter() { + compile(cx, jobs, unit)?; } Ok(()) } fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { let crate_types = unit.target.rustc_crate_types(); - let mut rustc = try!(prepare_rustc(cx, crate_types, unit)); + let mut rustc = prepare_rustc(cx, crate_types, unit)?; let name = unit.pkg.name().to_string(); if !cx.show_warnings(unit.pkg.package_id()) { - if try!(cx.config.rustc()).cap_lints { + if cx.config.rustc()?.cap_lints { rustc.arg("--cap-lints").arg("allow"); } else { rustc.arg("-Awarnings"); @@ -221,7 +221,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { } let has_custom_args = unit.profile.rustc_args.is_some(); - let filenames = try!(cx.target_filenames(unit)); + let filenames = cx.target_filenames(unit)?; let root = cx.out_dir(unit); // Prepare the native lib state (extra -L and -l flags) @@ -246,7 +246,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { let dep_info_loc = fingerprint::dep_info_loc(cx, unit); let cwd = cx.config.cwd().to_path_buf(); - rustc.args(&try!(cx.rustflags_args(unit))); + rustc.args(&cx.rustflags_args(unit)?); let json_errors = cx.build_config.json_errors; let package_id = unit.pkg.package_id().clone(); let target = unit.target.clone(); @@ -258,23 +258,23 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { // located somewhere in there. 
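For reference, the build-state lookup here feeds `add_native_deps` (shown further down), which turns a build script's recorded output into extra rustc arguments: every recorded search path becomes `-L`, while `-l` flags are only added when building the package that owns the script (the `pass_l_flag` / `current_id` checks in its signature). A simplified sketch, with `Output` as an illustrative stand-in for Cargo's `BuildOutput`:

    use std::path::PathBuf;

    struct Output {
        library_paths: Vec<PathBuf>,
        library_links: Vec<String>,
    }

    // Translate recorded build-script output into rustc arguments.
    fn native_args(out: &Output, pass_l_flag: bool) -> Vec<String> {
        let mut args = Vec::new();
        for path in &out.library_paths {
            args.push("-L".to_string());
            args.push(path.display().to_string());
        }
        if pass_l_flag {
            for lib in &out.library_links {
                args.push("-l".to_string());
                args.push(lib.clone());
            }
        }
        args
    }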
if let Some(build_deps) = build_deps { let build_state = build_state.outputs.lock().unwrap(); - try!(add_native_deps(&mut rustc, &build_state, &build_deps, - pass_l_flag, ¤t_id)); - try!(add_plugin_deps(&mut rustc, &build_state, &build_deps)); + add_native_deps(&mut rustc, &build_state, &build_deps, + pass_l_flag, ¤t_id)?; + add_plugin_deps(&mut rustc, &build_state, &build_deps)?; } // FIXME(rust-lang/rust#18913): we probably shouldn't have to do // this manually for &(ref dst, ref _link_dst, _linkable) in filenames.iter() { if fs::metadata(&dst).is_ok() { - try!(fs::remove_file(&dst).chain_error(|| { + fs::remove_file(&dst).chain_error(|| { human(format!("Could not remove file: {}.", dst.display())) - })); + })?; } } state.running(&rustc); - try!(if json_errors { + if json_errors { rustc.exec_with_streaming( &mut |line| if !line.is_empty() { Err(internal(&format!("compiler stdout is not empty: `{}`", line))) @@ -282,9 +282,9 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { Ok(()) }, &mut |line| { - let compiler_message = try!(json::Json::from_str(line).map_err(|_| { + let compiler_message = json::Json::from_str(line).map_err(|_| { internal(&format!("compiler produced invalid json: `{}`", line)) - })); + })?; machine_message::FromCompiler::new( &package_id, @@ -299,7 +299,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { rustc.exec() }.chain_error(|| { human(format!("Could not compile `{}`.", name)) - })); + })?; if do_rename && real_name != crate_name { let dst = &filenames[0].0; @@ -307,19 +307,19 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { .to_str().unwrap() .replace(&real_name, &crate_name)); if !has_custom_args || src.exists() { - try!(fs::rename(&src, &dst).chain_error(|| { + fs::rename(&src, &dst).chain_error(|| { internal(format!("could not rename crate {:?}", src)) - })); + })?; } } if !has_custom_args || fs::metadata(&rustc_dep_info_loc).is_ok() { info!("Renaming dep_info {:?} to {:?}", rustc_dep_info_loc, dep_info_loc); - try!(fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| { + fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| { internal(format!("could not rename dep info: {:?}", rustc_dep_info_loc)) - })); - try!(fingerprint::append_current_dir(&dep_info_loc, &cwd)); + })?; + fingerprint::append_current_dir(&dep_info_loc, &cwd)?; } Ok(()) @@ -333,10 +333,10 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { pass_l_flag: bool, current_id: &PackageId) -> CargoResult<()> { for key in build_scripts.to_link.iter() { - let output = try!(build_state.get(key).chain_error(|| { + let output = build_state.get(key).chain_error(|| { internal(format!("couldn't find build state for {}/{:?}", key.0, key.1)) - })); + })?; for path in output.library_paths.iter() { rustc.arg("-L").arg(path); } @@ -358,7 +358,7 @@ fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult { /// Link the compiled target (often of form foo-{metadata_hash}) to the /// final target. This must happen during both "Fresh" and "Compile" fn link_targets(cx: &mut Context, unit: &Unit) -> CargoResult { - let filenames = try!(cx.target_filenames(unit)); + let filenames = cx.target_filenames(unit)?; Ok(Work::new(move |_| { // If we're a "root crate", e.g. 
the target of this compilation, then we // hard link our outputs out of the `deps` directory into the directory @@ -375,11 +375,11 @@ fn link_targets(cx: &mut Context, unit: &Unit) -> CargoResult { debug!("linking {} to {}", src.display(), dst.display()); if dst.exists() { - try!(fs::remove_file(&dst).chain_error(|| { + fs::remove_file(&dst).chain_error(|| { human(format!("failed to remove: {}", dst.display())) - })); + })?; } - try!(fs::hard_link(&src, &dst) + fs::hard_link(&src, &dst) .or_else(|err| { debug!("hard link failed {}. falling back to fs::copy", err); fs::copy(&src, &dst).map(|_| ()) @@ -387,7 +387,7 @@ fn link_targets(cx: &mut Context, unit: &Unit) -> CargoResult { .chain_error(|| { human(format!("failed to link or copy `{}` to `{}`", src.display(), dst.display())) - })); + })?; } Ok(()) })) @@ -409,14 +409,14 @@ fn add_plugin_deps(rustc: &mut ProcessBuilder, let mut search_path = env::split_paths(&search_path).collect::>(); for id in build_scripts.plugins.iter() { let key = (id.clone(), Kind::Host); - let output = try!(build_state.get(&key).chain_error(|| { + let output = build_state.get(&key).chain_error(|| { internal(format!("couldn't find libs for plugin dep {}", id)) - })); + })?; for path in output.library_paths.iter() { search_path.push(path.clone()); } } - let search_path = try!(join_paths(&search_path, var)); + let search_path = join_paths(&search_path, var)?; rustc.env(var, &search_path); Ok(()) } @@ -424,16 +424,16 @@ fn add_plugin_deps(rustc: &mut ProcessBuilder, fn prepare_rustc(cx: &Context, crate_types: Vec<&str>, unit: &Unit) -> CargoResult { - let mut base = try!(cx.compilation.rustc_process(unit.pkg)); + let mut base = cx.compilation.rustc_process(unit.pkg)?; build_base_args(cx, &mut base, unit, &crate_types); build_plugin_args(&mut base, cx, unit); - try!(build_deps_args(&mut base, cx, unit)); + build_deps_args(&mut base, cx, unit)?; Ok(base) } fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult { - let mut rustdoc = try!(cx.compilation.rustdoc_process(unit.pkg)); + let mut rustdoc = cx.compilation.rustdoc_process(unit.pkg)?; rustdoc.arg(&root_path(cx, unit)) .cwd(cx.config.cwd()) .arg("--crate-name").arg(&unit.target.crate_name()); @@ -449,7 +449,7 @@ fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult { // Create the documentation directory ahead of time as rustdoc currently has // a bug where concurrent invocations will race to create this directory if // it doesn't already exist. 
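The `link_targets` hunk above prefers a hard link and falls back to a plain copy when linking fails (for example across filesystems). The same `or_else` shape in isolation:

    use std::fs;
    use std::io;
    use std::path::Path;

    // Remove any stale destination, try to hard link, and copy as a fallback.
    fn link_or_copy(src: &Path, dst: &Path) -> io::Result<()> {
        if dst.exists() {
            fs::remove_file(dst)?;
        }
        fs::hard_link(src, dst).or_else(|_err| fs::copy(src, dst).map(|_| ()))
    }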
- try!(fs::create_dir_all(&doc_dir)); + fs::create_dir_all(&doc_dir)?; rustdoc.arg("-o").arg(doc_dir); @@ -463,13 +463,13 @@ fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult { rustdoc.args(args); } - try!(build_deps_args(&mut rustdoc, cx, unit)); + build_deps_args(&mut rustdoc, cx, unit)?; if unit.pkg.has_custom_build() { rustdoc.env("OUT_DIR", &cx.layout(unit).build_out(unit.pkg)); } - rustdoc.args(&try!(cx.rustdocflags_args(unit))); + rustdoc.args(&cx.rustdocflags_args(unit)?); let name = unit.pkg.name().to_string(); let build_state = cx.build_state.clone(); @@ -658,9 +658,9 @@ fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context, unit: &Unit) cmd.env("OUT_DIR", &layout.build_out(unit.pkg)); } - for unit in try!(cx.dep_targets(unit)).iter() { + for unit in cx.dep_targets(unit)?.iter() { if unit.target.linkable() && !unit.profile.doc { - try!(link_to(cmd, cx, unit)); + link_to(cmd, cx, unit)?; } } @@ -668,7 +668,7 @@ fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context, unit: &Unit) fn link_to(cmd: &mut ProcessBuilder, cx: &Context, unit: &Unit) -> CargoResult<()> { - for (dst, _link_dst, linkable) in try!(cx.target_filenames(unit)) { + for (dst, _link_dst, linkable) in cx.target_filenames(unit)? { if !linkable { continue } diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs index f382a1dd128..6c267efb60b 100644 --- a/src/cargo/ops/cargo_test.rs +++ b/src/cargo/ops/cargo_test.rs @@ -14,15 +14,15 @@ pub struct TestOptions<'a> { pub fn run_tests(ws: &Workspace, options: &TestOptions, test_args: &[String]) -> CargoResult> { - let compilation = try!(compile_tests(ws, options)); + let compilation = compile_tests(ws, options)?; if options.no_run { return Ok(None) } let mut errors = if options.only_doc { - try!(run_doc_tests(options, test_args, &compilation)) + run_doc_tests(options, test_args, &compilation)? } else { - try!(run_unit_tests(options, test_args, &compilation)) + run_unit_tests(options, test_args, &compilation)? 
}; // If we have an error and want to fail fast, return @@ -39,7 +39,7 @@ pub fn run_tests(ws: &Workspace, } } - errors.extend(try!(run_doc_tests(options, test_args, &compilation))); + errors.extend(run_doc_tests(options, test_args, &compilation)?); if errors.is_empty() { Ok(None) } else { @@ -52,12 +52,12 @@ pub fn run_benches(ws: &Workspace, args: &[String]) -> CargoResult> { let mut args = args.to_vec(); args.push("--bench".to_string()); - let compilation = try!(compile_tests(ws, options)); + let compilation = compile_tests(ws, options)?; if options.no_run { return Ok(None) } - let errors = try!(run_unit_tests(options, &args, &compilation)); + let errors = run_unit_tests(options, &args, &compilation)?; match errors.len() { 0 => Ok(None), _ => Ok(Some(CargoTestError::new(errors))), @@ -67,7 +67,7 @@ pub fn run_benches(ws: &Workspace, fn compile_tests<'a>(ws: &Workspace<'a>, options: &TestOptions<'a>) -> CargoResult> { - let mut compilation = try!(ops::compile(ws, &options.compile_opts)); + let mut compilation = ops::compile(ws, &options.compile_opts)?; compilation.tests.sort_by(|a, b| { (a.0.package_id(), &a.1).cmp(&(b.0.package_id(), &b.1)) }); @@ -89,14 +89,14 @@ fn run_unit_tests(options: &TestOptions, Some(path) => path, None => &**exe, }; - let mut cmd = try!(compilation.target_process(exe, pkg)); + let mut cmd = compilation.target_process(exe, pkg)?; cmd.args(test_args); - try!(config.shell().concise(|shell| { + config.shell().concise(|shell| { shell.status("Running", to_display.display().to_string()) - })); - try!(config.shell().verbose(|shell| { + })?; + config.shell().verbose(|shell| { shell.status("Running", cmd.to_string()) - })); + })?; if let Err(e) = cmd.exec() { errors.push(e); @@ -116,7 +116,7 @@ fn run_doc_tests(options: &TestOptions, let config = options.compile_opts.config; // We don't build/rust doctests if target != host - if try!(config.rustc()).host != compilation.target { + if config.rustc()?.host != compilation.target { return Ok(errors); } @@ -127,8 +127,8 @@ fn run_doc_tests(options: &TestOptions, for (package, tests) in libs { for (lib, name, crate_name) in tests { - try!(config.shell().status("Doc-tests", name)); - let mut p = try!(compilation.rustdoc_process(package)); + config.shell().status("Doc-tests", name)?; + let mut p = compilation.rustdoc_process(package)?; p.arg("--test").arg(lib) .arg("--crate-name").arg(&crate_name); @@ -174,9 +174,9 @@ fn run_doc_tests(options: &TestOptions, } } - try!(config.shell().verbose(|shell| { + config.shell().verbose(|shell| { shell.status("Running", p.to_string()) - })); + })?; if let Err(e) = p.exec() { errors.push(e); if !options.no_fail_fast { diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs index ac9fcbddf31..30eec985a28 100644 --- a/src/cargo/ops/lockfile.rs +++ b/src/cargo/ops/lockfile.rs @@ -14,19 +14,19 @@ pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult> { } let root = Filesystem::new(ws.root().to_path_buf()); - let mut f = try!(root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")); + let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?; let mut s = String::new(); - try!(f.read_to_string(&mut s).chain_error(|| { + f.read_to_string(&mut s).chain_error(|| { human(format!("failed to read file: {}", f.path().display())) - })); + })?; (|| { - let table = try!(cargo_toml::parse(&s, f.path(), ws.config())); + let table = cargo_toml::parse(&s, f.path(), ws.config())?; let table = toml::Value::Table(table); let mut d = toml::Decoder::new(table); - let v: 
resolver::EncodableResolve = try!(Decodable::decode(&mut d)); - Ok(Some(try!(v.into_resolve(ws)))) + let v: resolver::EncodableResolve = Decodable::decode(&mut d)?; + Ok(Some(v.into_resolve(ws)?)) }).chain_error(|| { human(format!("failed to parse lock file at: {}", f.path().display())) }) @@ -38,7 +38,7 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file"); let orig = orig.and_then(|mut f| { let mut s = String::new(); - try!(f.read_to_string(&mut s)); + f.read_to_string(&mut s)?; Ok(s) }); @@ -102,8 +102,8 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> // Ok, if that didn't work just write it out ws_root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| { - try!(f.file().set_len(0)); - try!(f.write_all(out.as_bytes())); + f.file().set_len(0)?; + f.write_all(out.as_bytes())?; Ok(()) }).chain_error(|| { human(format!("failed to write {}", diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 5866242fdab..d937976f867 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -40,32 +40,32 @@ pub struct PublishOpts<'cfg> { } pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { - let pkg = try!(ws.current()); + let pkg = ws.current()?; if !pkg.publish() { bail!("some crates cannot be published.\n\ `{}` is marked as unpublishable", pkg.name()); } - let (mut registry, reg_id) = try!(registry(opts.config, + let (mut registry, reg_id) = registry(opts.config, opts.token.clone(), - opts.index.clone())); - try!(verify_dependencies(&pkg, ®_id)); + opts.index.clone())?; + verify_dependencies(&pkg, ®_id)?; // Prepare a tarball, with a non-surpressable warning if metadata // is missing since this is being put online. 
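The packaging call just below shows why the mechanical rewrite stays safe even with a trailing method call: `?` applies to the expression it follows before `.unwrap()` runs, exactly like `try!(...).unwrap()` did. A tiny illustrative sketch (the parser is made up):

    fn parse(s: &str) -> Result<Option<char>, String> {
        if s.is_empty() {
            Err("empty input".to_string())
        } else {
            Ok(s.chars().next())
        }
    }

    fn first_char(s: &str) -> Result<char, String> {
        // Same shape as `ops::package(ws, ...)?.unwrap()`: `?` propagates the
        // Err first, then `.unwrap()` runs on the Ok value.
        Ok(parse(s)?.unwrap())
    }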
- let tarball = try!(ops::package(ws, &ops::PackageOpts { + let tarball = ops::package(ws, &ops::PackageOpts { config: opts.config, verify: opts.verify, list: false, check_metadata: true, allow_dirty: opts.allow_dirty, jobs: opts.jobs, - })).unwrap(); + })?.unwrap(); // Upload said tarball to the specified destination - try!(opts.config.shell().status("Uploading", pkg.package_id().to_string())); - try!(transmit(opts.config, &pkg, tarball.file(), &mut registry, opts.dry_run)); + opts.config.shell().status("Uploading", pkg.package_id().to_string())?; + transmit(opts.config, &pkg, tarball.file(), &mut registry, opts.dry_run)?; Ok(()) } @@ -114,7 +114,7 @@ fn transmit(config: &Config, ref keywords, ref readme, ref repository, ref license, ref license_file, } = *manifest.metadata(); let readme = match *readme { - Some(ref readme) => Some(try!(paths::read(&pkg.root().join(readme)))), + Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?), None => None, }; match *license_file { @@ -128,7 +128,7 @@ fn transmit(config: &Config, // Do not upload if performing a dry run if dry_run { - try!(config.shell().warn("aborting upload due to dry run")); + config.shell().warn("aborting upload due to dry run")?; return Ok(()); } @@ -152,8 +152,8 @@ fn transmit(config: &Config, } pub fn registry_configuration(config: &Config) -> CargoResult { - let index = try!(config.get_string("registry.index")).map(|p| p.val); - let token = try!(config.get_string("registry.token")).map(|p| p.val); + let index = config.get_string("registry.index")?.map(|p| p.val); + let token = config.get_string("registry.token")?.map(|p| p.val); Ok(RegistryConfig { index: index, token: token }) } @@ -164,20 +164,20 @@ pub fn registry(config: &Config, let RegistryConfig { token: token_config, index: _index_config, - } = try!(registry_configuration(config)); + } = registry_configuration(config)?; let token = token.or(token_config); let sid = match index { - Some(index) => SourceId::for_registry(&try!(index.to_url())), - None => try!(SourceId::crates_io(config)), + Some(index) => SourceId::for_registry(&index.to_url()?), + None => SourceId::crates_io(config)?, }; let api_host = { let mut src = RegistrySource::remote(&sid, config); - try!(src.update().chain_error(|| { + src.update().chain_error(|| { human(format!("failed to update {}", sid)) - })); - (try!(src.config())).unwrap().api + })?; + (src.config()?).unwrap().api }; - let handle = try!(http_handle(config)); + let handle = http_handle(config)?; Ok((Registry::new_handle(api_host, token, handle), sid)) } @@ -193,18 +193,18 @@ pub fn http_handle(config: &Config) -> CargoResult { // connect phase as well as a "low speed" timeout so if we don't receive // many bytes in a large-ish period of time then we time out. let mut handle = Easy::new(); - try!(handle.connect_timeout(Duration::new(30, 0))); - try!(handle.low_speed_limit(10 /* bytes per second */)); - try!(handle.low_speed_time(Duration::new(30, 0))); - if let Some(proxy) = try!(http_proxy(config)) { - try!(handle.proxy(&proxy)); + handle.connect_timeout(Duration::new(30, 0))?; + handle.low_speed_limit(10 /* bytes per second */)?; + handle.low_speed_time(Duration::new(30, 0))?; + if let Some(proxy) = http_proxy(config)? { + handle.proxy(&proxy)?; } - if let Some(cainfo) = try!(config.get_path("http.cainfo")) { - try!(handle.cainfo(&cainfo.val)); + if let Some(cainfo) = config.get_path("http.cainfo")? 
{ + handle.cainfo(&cainfo.val)?; } - if let Some(timeout) = try!(http_timeout(config)) { - try!(handle.connect_timeout(Duration::new(timeout as u64, 0))); - try!(handle.low_speed_time(Duration::new(timeout as u64, 0))); + if let Some(timeout) = http_timeout(config)? { + handle.connect_timeout(Duration::new(timeout as u64, 0))?; + handle.low_speed_time(Duration::new(timeout as u64, 0))?; } Ok(handle) } @@ -214,7 +214,7 @@ pub fn http_handle(config: &Config) -> CargoResult { /// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified /// via environment variables are picked up by libcurl. fn http_proxy(config: &Config) -> CargoResult> { - match try!(config.get_string("http.proxy")) { + match config.get_string("http.proxy")? { Some(s) => return Ok(Some(s.val)), None => {} } @@ -241,7 +241,7 @@ fn http_proxy(config: &Config) -> CargoResult> { /// * https_proxy env var /// * HTTPS_PROXY env var pub fn http_proxy_exists(config: &Config) -> CargoResult { - if try!(http_proxy(config)).is_some() { + if http_proxy(config)?.is_some() { Ok(true) } else { Ok(["http_proxy", "HTTP_PROXY", @@ -250,7 +250,7 @@ pub fn http_proxy_exists(config: &Config) -> CargoResult { } pub fn http_timeout(config: &Config) -> CargoResult> { - match try!(config.get_i64("http.timeout")) { + match config.get_i64("http.timeout")? { Some(s) => return Ok(Some(s.val)), None => {} } @@ -258,7 +258,7 @@ pub fn http_timeout(config: &Config) -> CargoResult> { } pub fn registry_login(config: &Config, token: String) -> CargoResult<()> { - let RegistryConfig { index, token: _ } = try!(registry_configuration(config)); + let RegistryConfig { index, token: _ } = registry_configuration(config)?; let mut map = HashMap::new(); let p = config.cwd().to_path_buf(); match index { @@ -286,23 +286,23 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { let name = match opts.krate { Some(ref name) => name.clone(), None => { - let manifest_path = try!(find_root_manifest_for_wd(None, config.cwd())); - let pkg = try!(Package::for_path(&manifest_path, config)); + let manifest_path = find_root_manifest_for_wd(None, config.cwd())?; + let pkg = Package::for_path(&manifest_path, config)?; pkg.name().to_string() } }; - let (mut registry, _) = try!(registry(config, opts.token.clone(), - opts.index.clone())); + let (mut registry, _) = registry(config, opts.token.clone(), + opts.index.clone())?; match opts.to_add { Some(ref v) => { let v = v.iter().map(|s| &s[..]).collect::>(); - try!(config.shell().status("Owner", format!("adding {:?} to crate {}", - v, name))); - try!(registry.add_owners(&name, &v).map_err(|e| { + config.shell().status("Owner", format!("adding {:?} to crate {}", + v, name))?; + registry.add_owners(&name, &v).map_err(|e| { human(format!("failed to add owners to crate {}: {}", name, e)) - })); + })?; } None => {} } @@ -310,19 +310,19 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { match opts.to_remove { Some(ref v) => { let v = v.iter().map(|s| &s[..]).collect::>(); - try!(config.shell().status("Owner", format!("removing {:?} from crate {}", - v, name))); - try!(registry.remove_owners(&name, &v).map_err(|e| { + config.shell().status("Owner", format!("removing {:?} from crate {}", + v, name))?; + registry.remove_owners(&name, &v).map_err(|e| { human(format!("failed to remove owners from crate {}: {}", name, e)) - })); + })?; } None => {} } if opts.list { - let owners = try!(registry.list_owners(&name).map_err(|e| { + let owners = 
registry.list_owners(&name).map_err(|e| { human(format!("failed to list owners of crate {}: {}", name, e)) - })); + })?; for owner in owners.iter() { print!("{}", owner.login); match (owner.name.as_ref(), owner.email.as_ref()) { @@ -346,8 +346,8 @@ pub fn yank(config: &Config, let name = match krate { Some(name) => name, None => { - let manifest_path = try!(find_root_manifest_for_wd(None, config.cwd())); - let pkg = try!(Package::for_path(&manifest_path, config)); + let manifest_path = find_root_manifest_for_wd(None, config.cwd())?; + let pkg = Package::for_path(&manifest_path, config)?; pkg.name().to_string() } }; @@ -356,18 +356,18 @@ pub fn yank(config: &Config, None => bail!("a version must be specified to yank") }; - let (mut registry, _) = try!(registry(config, token, index)); + let (mut registry, _) = registry(config, token, index)?; if undo { - try!(config.shell().status("Unyank", format!("{}:{}", name, version))); - try!(registry.unyank(&name, &version).map_err(|e| { + config.shell().status("Unyank", format!("{}:{}", name, version))?; + registry.unyank(&name, &version).map_err(|e| { human(format!("failed to undo a yank: {}", e)) - })); + })?; } else { - try!(config.shell().status("Yank", format!("{}:{}", name, version))); - try!(registry.yank(&name, &version).map_err(|e| { + config.shell().status("Yank", format!("{}:{}", name, version))?; + registry.yank(&name, &version).map_err(|e| { human(format!("failed to yank: {}", e)) - })); + })?; } Ok(()) @@ -385,10 +385,10 @@ pub fn search(query: &str, } } - let (mut registry, _) = try!(registry(config, None, index)); - let (crates, total_crates) = try!(registry.search(query, limit).map_err(|e| { + let (mut registry, _) = registry(config, None, index)?; + let (crates, total_crates) = registry.search(query, limit).map_err(|e| { human(format!("failed to retrieve search results from the registry: {}", e)) - })); + })?; let list_items = crates.iter() .map(|krate| ( @@ -411,25 +411,25 @@ pub fn search(query: &str, } None => name }; - try!(config.shell().say(line, BLACK)); + config.shell().say(line, BLACK)?; } let search_max_limit = 100; if total_crates > limit as u32 && limit < search_max_limit { - try!(config.shell().say( + config.shell().say( format!("... and {} crates more (use --limit N to see more)", total_crates - limit as u32), BLACK) - ); + ?; } else if total_crates > limit as u32 && limit >= search_max_limit { - try!(config.shell().say( + config.shell().say( format!( "... and {} crates more (go to http://crates.io/search?q={} to see more)", total_crates - limit as u32, percent_encode(query.as_bytes(), QUERY_ENCODE_SET) ), BLACK) - ); + ?; } Ok(()) diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 0bde351e287..335c06b88d7 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -13,14 +13,14 @@ use util::CargoResult; /// lockfile. pub fn resolve_ws(registry: &mut PackageRegistry, ws: &Workspace) -> CargoResult { - let prev = try!(ops::load_pkg_lockfile(ws)); - let resolve = try!(resolve_with_previous(registry, ws, - Method::Everything, - prev.as_ref(), None, &[])); + let prev = ops::load_pkg_lockfile(ws)?; + let resolve = resolve_with_previous(registry, ws, + Method::Everything, + prev.as_ref(), None, &[])?; // Avoid writing a lockfile if we are `cargo install`ing a non local package. 
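The guard below writes the lockfile only when the workspace's current package comes from a local path source, and defaults to writing when there is no current package at all. A sketch of the same `Option::map(...).unwrap_or(true)` shape, with `Pkg` as an illustrative stand-in for the workspace's current package:

    struct Pkg {
        is_path_source: bool,
    }

    // Some(local path package)     -> true  (write the lockfile)
    // Some(registry/git package)   -> false (skip, e.g. `cargo install` of a crate)
    // None (no current package)    -> true
    fn should_write_lockfile(current: Option<&Pkg>) -> bool {
        current.map(|p| p.is_path_source).unwrap_or(true)
    }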
if ws.current_opt().map(|pkg| pkg.package_id().source_id().is_path()).unwrap_or(true) { - try!(ops::write_pkg_lockfile(ws, &resolve)); + ops::write_pkg_lockfile(ws, &resolve)?; } Ok(resolve) } @@ -88,26 +88,51 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, let mut summaries = Vec::new(); for member in ws.members() { - try!(registry.add_sources(&[member.package_id().source_id() - .clone()])); + registry.add_sources(&[member.package_id().source_id().clone()])?; + let method_to_resolve = match method { + // When everything for a workspace we want to be sure to resolve all + // members in the workspace, so propagate the `Method::Everything`. + Method::Everything => Method::Everything, - // If we're resolving everything then we include all members of the - // workspace. If we want a specific set of requirements and we're - // compiling only a single workspace crate then resolve only it. This - // case should only happen after we have a previous resolution, however, - // so assert that the previous exists. - if let Method::Required { .. } = method { - assert!(previous.is_some()); - if let Some(current) = ws.current_opt() { - if member.package_id() != current.package_id() && - !specs.iter().any(|spec| spec.matches(member.package_id())) { - continue; + // If we're not resolving everything though then the workspace is + // already resolved and now we're drilling down from that to the + // exact crate graph we're going to build. Here we don't necessarily + // want to keep around all workspace crates as they may not all be + // built/tested. + // + // Additionally, the `method` specified represents command line + // flags, which really only matters for the current package + // (determined by the cwd). If other packages are specified (via + // `-p`) then the command line flags like features don't apply to + // them. + // + // As a result, if this `member` is the current member of the + // workspace, then we use `method` specified. Otherwise we use a + // base method with no features specified but using default features + // for any other packages specified with `-p`. + Method::Required { dev_deps, .. 
} => { + assert!(previous.is_some()); + let base = Method::Required { + dev_deps: dev_deps, + features: &[], + uses_default_features: true, + }; + let member_id = member.package_id(); + match ws.current_opt() { + Some(current) if member_id == current.package_id() => method, + _ => { + if specs.iter().any(|spec| spec.matches(member_id)) { + base + } else { + continue + } + } } } - } + }; let summary = registry.lock(member.summary().clone()); - summaries.push((summary, method)); + summaries.push((summary, method_to_resolve)); } let root_replace = ws.root_replace(); @@ -128,9 +153,9 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, None => root_replace.to_vec(), }; - let mut resolved = try!(resolver::resolve(&summaries, &replace, registry)); + let mut resolved = resolver::resolve(&summaries, &replace, registry)?; if let Some(previous) = previous { - try!(resolved.merge_from(previous)); + resolved.merge_from(previous)?; } return Ok(resolved); diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs index ac254f147ec..80a13318f65 100644 --- a/src/cargo/sources/config.rs +++ b/src/cargo/sources/config.rs @@ -40,10 +40,10 @@ struct SourceConfig { impl<'cfg> SourceConfigMap<'cfg> { pub fn new(config: &'cfg Config) -> CargoResult> { - let mut base = try!(SourceConfigMap::empty(config)); - if let Some(table) = try!(config.get_table("source")) { + let mut base = SourceConfigMap::empty(config)?; + if let Some(table) = config.get_table("source")? { for (key, value) in table.val.iter() { - try!(base.add_config(key, value)); + base.add_config(key, value)?; } } Ok(base) @@ -56,7 +56,7 @@ impl<'cfg> SourceConfigMap<'cfg> { config: config, }; base.add("crates-io", SourceConfig { - id: try!(SourceId::crates_io(config)), + id: SourceId::crates_io(config)?, replace_with: None, }); Ok(base) @@ -126,40 +126,40 @@ a lock file compatible with `{orig}` cannot be generated in this situation } fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> { - let (table, _path) = try!(cfg.table(&format!("source.{}", name))); + let (table, _path) = cfg.table(&format!("source.{}", name))?; let mut srcs = Vec::new(); if let Some(val) = table.get("registry") { - let url = try!(url(val, &format!("source.{}.registry", name))); + let url = url(val, &format!("source.{}.registry", name))?; srcs.push(SourceId::for_registry(&url)); } if let Some(val) = table.get("local-registry") { - let (s, path) = try!(val.string(&format!("source.{}.local-registry", - name))); + let (s, path) = val.string(&format!("source.{}.local-registry", + name))?; let mut path = path.to_path_buf(); path.pop(); path.pop(); path.push(s); - srcs.push(try!(SourceId::for_local_registry(&path))); + srcs.push(SourceId::for_local_registry(&path)?); } if let Some(val) = table.get("directory") { - let (s, path) = try!(val.string(&format!("source.{}.directory", - name))); + let (s, path) = val.string(&format!("source.{}.directory", + name))?; let mut path = path.to_path_buf(); path.pop(); path.pop(); path.push(s); - srcs.push(try!(SourceId::for_directory(&path))); + srcs.push(SourceId::for_directory(&path)?); } if name == "crates-io" && srcs.is_empty() { - srcs.push(try!(SourceId::crates_io(self.config))); + srcs.push(SourceId::crates_io(self.config)?); } let mut srcs = srcs.into_iter(); - let src = try!(srcs.next().chain_error(|| { + let src = srcs.next().chain_error(|| { human(format!("no source URL specified for `source.{}`, need \ either `registry` or `local-registry` defined", name)) - })); + })?; if 
srcs.next().is_some() { return Err(human(format!("more than one source URL specified for \ `source.{}`", name))) @@ -167,8 +167,8 @@ a lock file compatible with `{orig}` cannot be generated in this situation let mut replace_with = None; if let Some(val) = table.get("replace-with") { - let (s, path) = try!(val.string(&format!("source.{}.replace-with", - name))); + let (s, path) = val.string(&format!("source.{}.replace-with", + name))?; replace_with = Some((s.to_string(), path.to_path_buf())); } @@ -180,7 +180,7 @@ a lock file compatible with `{orig}` cannot be generated in this situation return Ok(()); fn url(cfg: &ConfigValue, key: &str) -> CargoResult { - let (url, path) = try!(cfg.string(key)); + let (url, path) = cfg.string(key)?; url.to_url().chain_error(|| { human(format!("configuration key `{}` specified an invalid \ URL (in {})", key, path.display())) diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs index 84a9501a03b..fc7abf56c73 100644 --- a/src/cargo/sources/directory.rs +++ b/src/cargo/sources/directory.rs @@ -59,34 +59,34 @@ impl<'cfg> Registry for DirectorySource<'cfg> { impl<'cfg> Source for DirectorySource<'cfg> { fn update(&mut self) -> CargoResult<()> { self.packages.clear(); - let entries = try!(self.root.read_dir().chain_error(|| { + let entries = self.root.read_dir().chain_error(|| { human(format!("failed to read root of directory source: {}", self.root.display())) - })); + })?; for entry in entries { - let entry = try!(entry); + let entry = entry?; let path = entry.path(); let mut src = PathSource::new(&path, &self.id, self.config); - try!(src.update()); - let pkg = try!(src.root_package()); + src.update()?; + let pkg = src.root_package()?; let cksum_file = path.join(".cargo-checksum.json"); - let cksum = try!(paths::read(&path.join(cksum_file)).chain_error(|| { + let cksum = paths::read(&path.join(cksum_file)).chain_error(|| { human(format!("failed to load checksum `.cargo-checksum.json` \ of {} v{}", pkg.package_id().name(), pkg.package_id().version())) - })); - let cksum: Checksum = try!(json::decode(&cksum).chain_error(|| { + })?; + let cksum: Checksum = json::decode(&cksum).chain_error(|| { human(format!("failed to decode `.cargo-checksum.json` of \ {} v{}", pkg.package_id().name(), pkg.package_id().version())) - })); + })?; let mut manifest = pkg.manifest().clone(); let summary = manifest.summary().clone(); @@ -120,10 +120,10 @@ impl<'cfg> Source for DirectorySource<'cfg> { let mut h = Sha256::new(); let file = pkg.root().join(file); - try!((|| -> CargoResult<()> { - let mut f = try!(File::open(&file)); + (|| -> CargoResult<()> { + let mut f = File::open(&file)?; loop { - match try!(f.read(&mut buf)) { + match f.read(&mut buf)? 
{ 0 => return Ok(()), n => h.update(&buf[..n]), } @@ -131,7 +131,7 @@ impl<'cfg> Source for DirectorySource<'cfg> { }).chain_error(|| { human(format!("failed to calculate checksum of: {}", file.display())) - })); + })?; let actual = h.finish().to_hex(); if &*actual != cksum { diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index b0f1053ef1d..321fe4dfcd0 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -50,7 +50,7 @@ impl<'cfg> GitSource<'cfg> { pub fn read_packages(&mut self) -> CargoResult> { if self.path_source.is_none() { - try!(self.update()); + self.update()?; } self.path_source.as_mut().unwrap().read_packages() } @@ -104,7 +104,7 @@ pub fn canonicalize_url(url: &Url) -> Url { impl<'cfg> Debug for GitSource<'cfg> { fn fmt(&self, f: &mut Formatter) -> fmt::Result { - try!(write!(f, "git repo at {}", self.remote.url())); + write!(f, "git repo at {}", self.remote.url())?; match self.reference.to_ref_string() { Some(s) => write!(f, " ({})", s), @@ -123,8 +123,8 @@ impl<'cfg> Registry for GitSource<'cfg> { impl<'cfg> Source for GitSource<'cfg> { fn update(&mut self) -> CargoResult<()> { - let lock = try!(self.config.git_path() - .open_rw(".cargo-lock-git", self.config, "the git checkouts")); + let lock = self.config.git_path() + .open_rw(".cargo-lock-git", self.config, "the git checkouts")?; let db_path = lock.parent().join("db").join(&self.ident); @@ -137,16 +137,16 @@ impl<'cfg> Source for GitSource<'cfg> { self.source_id.precise().is_none(); let (repo, actual_rev) = if should_update { - try!(self.config.shell().status("Updating", - format!("git repository `{}`", self.remote.url()))); + self.config.shell().status("Updating", + format!("git repository `{}`", self.remote.url()))?; trace!("updating git source `{:?}`", self.remote); - let repo = try!(self.remote.checkout(&db_path, &self.config)); - let rev = try!(repo.rev_for(&self.reference)); + let repo = self.remote.checkout(&db_path, &self.config)?; + let rev = repo.rev_for(&self.reference)?; (repo, rev) } else { - (try!(self.remote.db_at(&db_path)), actual_rev.unwrap()) + (self.remote.db_at(&db_path)?, actual_rev.unwrap()) }; let checkout_path = lock.parent().join("checkouts") @@ -157,7 +157,7 @@ impl<'cfg> Source for GitSource<'cfg> { // in scope so the destructors here won't tamper with too much. // Checkout is immutable, so we don't need to protect it with a lock once // it is created. - try!(repo.copy_to(actual_rev.clone(), &checkout_path, &self.config)); + repo.copy_to(actual_rev.clone(), &checkout_path, &self.config)?; let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); let path_source = PathSource::new_recursive(&checkout_path, diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index f7dc89d7fba..7452c1dbbf8 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -105,22 +105,22 @@ impl GitRemote { pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { - let db = try!(self.db_at(path)); + let db = self.db_at(path)?; db.rev_for(reference) } pub fn checkout(&self, into: &Path, cargo_config: &Config) -> CargoResult { let repo = match git2::Repository::open(into) { Ok(repo) => { - try!(self.fetch_into(&repo, &cargo_config).chain_error(|| { + self.fetch_into(&repo, &cargo_config).chain_error(|| { human(format!("failed to fetch into {}", into.display())) - })); + })?; repo } Err(..) 
=> { - try!(self.clone_into(into, &cargo_config).chain_error(|| { + self.clone_into(into, &cargo_config).chain_error(|| { human(format!("failed to clone into: {}", into.display())) - })) + })? } }; @@ -132,7 +132,7 @@ impl GitRemote { } pub fn db_at(&self, db_path: &Path) -> CargoResult { - let repo = try!(git2::Repository::open(db_path)); + let repo = git2::Repository::open(db_path)?; Ok(GitDatabase { remote: self.clone(), path: db_path.to_path_buf(), @@ -150,11 +150,11 @@ impl GitRemote { fn clone_into(&self, dst: &Path, cargo_config: &Config) -> CargoResult { let url = self.url.to_string(); if fs::metadata(&dst).is_ok() { - try!(fs::remove_dir_all(dst)); + fs::remove_dir_all(dst)?; } - try!(fs::create_dir_all(dst)); - let repo = try!(git2::Repository::init_bare(dst)); - try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*", &cargo_config)); + fs::create_dir_all(dst)?; + let repo = git2::Repository::init_bare(dst)?; + fetch(&repo, &url, "refs/heads/*:refs/heads/*", &cargo_config)?; Ok(repo) } } @@ -170,45 +170,45 @@ impl GitDatabase { Ok(repo) => { let checkout = GitCheckout::new(dest, self, rev, repo); if !checkout.is_fresh() { - try!(checkout.fetch(&cargo_config)); - try!(checkout.reset()); + checkout.fetch(&cargo_config)?; + checkout.reset()?; assert!(checkout.is_fresh()); } checkout } - Err(..) => try!(GitCheckout::clone_into(dest, self, rev)), + Err(..) => GitCheckout::clone_into(dest, self, rev)?, }; - try!(checkout.update_submodules(&cargo_config).chain_error(|| { + checkout.update_submodules(&cargo_config).chain_error(|| { internal("failed to update submodules") - })); + })?; Ok(checkout) } pub fn rev_for(&self, reference: &GitReference) -> CargoResult { let id = match *reference { GitReference::Tag(ref s) => { - try!((|| { + (|| { let refname = format!("refs/tags/{}", s); - let id = try!(self.repo.refname_to_id(&refname)); - let obj = try!(self.repo.find_object(id, None)); - let obj = try!(obj.peel(ObjectType::Commit)); + let id = self.repo.refname_to_id(&refname)?; + let obj = self.repo.find_object(id, None)?; + let obj = obj.peel(ObjectType::Commit)?; Ok(obj.id()) }).chain_error(|| { human(format!("failed to find tag `{}`", s)) - })) + })? } GitReference::Branch(ref s) => { - try!((|| { - let b = try!(self.repo.find_branch(s, git2::BranchType::Local)); + (|| { + let b = self.repo.find_branch(s, git2::BranchType::Local)?; b.get().target().chain_error(|| { human(format!("branch `{}` did not have a target", s)) }) }).chain_error(|| { human(format!("failed to find branch `{}`", s)) - })) + })? 
} GitReference::Rev(ref s) => { - let obj = try!(self.repo.revparse_single(s)); + let obj = self.repo.revparse_single(s)?; obj.id() } }; @@ -216,7 +216,7 @@ impl GitDatabase { } pub fn has_ref(&self, reference: &str) -> CargoResult<()> { - try!(self.repo.revparse_single(reference)); + self.repo.revparse_single(reference)?; Ok(()) } } @@ -238,31 +238,31 @@ impl<'a> GitCheckout<'a> { revision: GitRevision) -> CargoResult> { - let repo = try!(GitCheckout::clone_repo(database.path(), into)); + let repo = GitCheckout::clone_repo(database.path(), into)?; let checkout = GitCheckout::new(into, database, revision, repo); - try!(checkout.reset()); + checkout.reset()?; Ok(checkout) } fn clone_repo(source: &Path, into: &Path) -> CargoResult { let dirname = into.parent().unwrap(); - try!(fs::create_dir_all(&dirname).chain_error(|| { + fs::create_dir_all(&dirname).chain_error(|| { human(format!("Couldn't mkdir {}", dirname.display())) - })); + })?; if fs::metadata(&into).is_ok() { - try!(fs::remove_dir_all(into).chain_error(|| { + fs::remove_dir_all(into).chain_error(|| { human(format!("Couldn't rmdir {}", into.display())) - })); + })?; } - let url = try!(source.to_url()); + let url = source.to_url()?; let url = url.to_string(); - let repo = try!(git2::Repository::clone(&url, into).chain_error(|| { + let repo = git2::Repository::clone(&url, into).chain_error(|| { internal(format!("failed to clone {} into {}", source.display(), into.display())) - })); + })?; Ok(repo) } @@ -278,10 +278,10 @@ impl<'a> GitCheckout<'a> { fn fetch(&self, cargo_config: &Config) -> CargoResult<()> { info!("fetch {}", self.repo.path().display()); - let url = try!(self.database.path.to_url()); + let url = self.database.path.to_url()?; let url = url.to_string(); let refspec = "refs/heads/*:refs/heads/*"; - try!(fetch(&self.repo, &url, refspec, &cargo_config)); + fetch(&self.repo, &url, refspec, &cargo_config)?; Ok(()) } @@ -297,9 +297,9 @@ impl<'a> GitCheckout<'a> { let ok_file = self.location.join(".cargo-ok"); let _ = fs::remove_file(&ok_file); info!("reset {} to {}", self.repo.path().display(), self.revision); - let object = try!(self.repo.find_object(self.revision.0, None)); - try!(self.repo.reset(&object, git2::ResetType::Hard, None)); - try!(File::create(ok_file)); + let object = self.repo.find_object(self.revision.0, None)?; + self.repo.reset(&object, git2::ResetType::Hard, None)?; + File::create(ok_file)?; Ok(()) } @@ -309,11 +309,11 @@ impl<'a> GitCheckout<'a> { fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> { info!("update submodules for: {:?}", repo.workdir().unwrap()); - for mut child in try!(repo.submodules()).into_iter() { - try!(child.init(false)); - let url = try!(child.url().chain_error(|| { + for mut child in repo.submodules()?.into_iter() { + child.init(false)?; + let url = child.url().chain_error(|| { internal("non-utf8 url for submodule") - })); + })?; // A submodule which is listed in .gitmodules but not actually // checked out will not have a head id, so we should ignore it. @@ -327,7 +327,7 @@ impl<'a> GitCheckout<'a> { // as the submodule's head, then we can bail out and go to the // next submodule. let head_and_repo = child.open().and_then(|repo| { - let target = try!(repo.head()).target(); + let target = repo.head()?.target(); Ok((target, repo)) }); let repo = match head_and_repo { @@ -340,20 +340,20 @@ impl<'a> GitCheckout<'a> { Err(..) 
=> { let path = repo.workdir().unwrap().join(child.path()); let _ = fs::remove_dir_all(&path); - try!(git2::Repository::clone(url, &path)) + git2::Repository::clone(url, &path)? } }; // Fetch data from origin and reset to the head commit let refspec = "refs/heads/*:refs/heads/*"; - try!(fetch(&repo, url, refspec, &cargo_config).chain_error(|| { + fetch(&repo, url, refspec, &cargo_config).chain_error(|| { internal(format!("failed to fetch submodule `{}` from {}", child.name().unwrap_or(""), url)) - })); + })?; - let obj = try!(repo.find_object(head, None)); - try!(repo.reset(&obj, git2::ResetType::Hard, None)); - try!(update_submodules(&repo, &cargo_config)); + let obj = repo.find_object(head, None)?; + repo.reset(&obj, git2::ResetType::Hard, None)?; + update_submodules(&repo, &cargo_config)?; } Ok(()) } @@ -569,19 +569,19 @@ pub fn fetch(repo: &git2::Repository, was specified") } - with_authentication(url, &try!(repo.config()), |f| { + with_authentication(url, &repo.config()?, |f| { let mut cb = git2::RemoteCallbacks::new(); cb.credentials(f); // Create a local anonymous remote in the repository to fetch the url - let mut remote = try!(repo.remote_anonymous(&url)); + let mut remote = repo.remote_anonymous(&url)?; let mut opts = git2::FetchOptions::new(); opts.remote_callbacks(cb) .download_tags(git2::AutotagOption::All); - try!(network::with_retry(config, ||{ + network::with_retry(config, ||{ remote.fetch(&[refspec], Some(&mut opts), None) - })); + })?; Ok(()) }) } diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 052d01c7dab..18e6e35da8a 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -56,7 +56,7 @@ impl<'cfg> PathSource<'cfg> { pub fn root_package(&mut self) -> CargoResult { trace!("root_package; source={:?}", self); - try!(self.update()); + self.update()?; match self.packages.iter().find(|p| p.root() == &*self.path) { Some(pkg) => Ok(pkg.clone()), @@ -71,8 +71,8 @@ impl<'cfg> PathSource<'cfg> { ops::read_packages(&self.path, &self.id, self.config) } else { let path = self.path.join("Cargo.toml"); - let (pkg, _) = try!(ops::read_package(&path, &self.id, - self.config)); + let (pkg, _) = ops::read_package(&path, &self.id, + self.config)?; Ok(vec![pkg]) } } @@ -94,10 +94,18 @@ impl<'cfg> PathSource<'cfg> { human(format!("could not parse pattern `{}`: {}", p, e)) }) }; - let exclude = try!(pkg.manifest().exclude().iter() - .map(|p| parse(p)).collect::, _>>()); - let include = try!(pkg.manifest().include().iter() - .map(|p| parse(p)).collect::, _>>()); + + let exclude = pkg.manifest() + .exclude() + .iter() + .map(|p| parse(p)) + .collect::, _>>()?; + + let include = pkg.manifest() + .include() + .iter() + .map(|p| parse(p)) + .collect::, _>>()?; let mut filter = |p: &Path| { let relative_path = util::without_prefix(p, &root).unwrap(); @@ -122,7 +130,7 @@ impl<'cfg> PathSource<'cfg> { // check to see if we are indeed part of the index. If not, then // this is likely an unrelated git repo, so keep going. 
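The check described just above amounts to asking git whether the package's manifest is tracked by the surrounding repository. As a standalone illustration (the helper name is hypothetical; the git2 calls are the same ones the hunk below uses):

    // Returns true when `rel_manifest` (a path relative to the repository
    // root, e.g. "foo/Cargo.toml") is present in the repository's index.
    fn manifest_in_index(repo: &git2::Repository,
                         rel_manifest: &std::path::Path) -> bool {
        repo.index()
            .map(|index| index.get_path(rel_manifest, 0).is_some())
            .unwrap_or(false)
    }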
if let Ok(repo) = git2::Repository::open(cur) { - let index = try!(repo.index()); + let index = repo.index()?; let path = util::without_prefix(root, cur) .unwrap().join("Cargo.toml"); if index.get_path(&path, 0).is_some() { @@ -146,10 +154,10 @@ impl<'cfg> PathSource<'cfg> { filter: &mut FnMut(&Path) -> bool) -> CargoResult> { warn!("list_files_git {}", pkg.package_id()); - let index = try!(repo.index()); - let root = try!(repo.workdir().chain_error(|| { + let index = repo.index()?; + let root = repo.workdir().chain_error(|| { internal_error("Can't list files on a bare repository.", "") - })); + })?; let pkg_path = pkg.root(); let mut ret = Vec::::new(); @@ -171,7 +179,7 @@ impl<'cfg> PathSource<'cfg> { if let Some(suffix) = util::without_prefix(pkg_path, &root) { opts.pathspec(suffix); } - let statuses = try!(repo.statuses(Some(&mut opts))); + let statuses = repo.statuses(Some(&mut opts))?; let untracked = statuses.iter().filter_map(|entry| { match entry.status() { git2::STATUS_WT_NEW => Some((join(&root, entry.path_bytes()), None)), @@ -182,7 +190,7 @@ impl<'cfg> PathSource<'cfg> { let mut subpackages_found = Vec::new(); 'outer: for (file_path, is_dir) in index_files.chain(untracked) { - let file_path = try!(file_path); + let file_path = file_path?; // Filter out files blatantly outside this package. This is helped a // bit obove via the `pathspec` function call, but we need to filter @@ -223,20 +231,20 @@ impl<'cfg> PathSource<'cfg> { if is_dir.unwrap_or_else(|| file_path.is_dir()) { warn!(" found submodule {}", file_path.display()); let rel = util::without_prefix(&file_path, &root).unwrap(); - let rel = try!(rel.to_str().chain_error(|| { + let rel = rel.to_str().chain_error(|| { human(format!("invalid utf-8 filename: {}", rel.display())) - })); + })?; // Git submodules are currently only named through `/` path // separators, explicitly not `\` which windows uses. Who knew? let rel = rel.replace(r"\", "/"); match repo.find_submodule(&rel).and_then(|s| s.open()) { Ok(repo) => { - let files = try!(self.list_files_git(pkg, repo, filter)); + let files = self.list_files_git(pkg, repo, filter)?; ret.extend(files.into_iter()); } Err(..) => { - try!(PathSource::walk(&file_path, &mut ret, false, - filter)); + PathSource::walk(&file_path, &mut ret, false, + filter)?; } } } else if (*filter)(&file_path) { @@ -267,7 +275,7 @@ impl<'cfg> PathSource<'cfg> { fn list_files_walk(&self, pkg: &Package, filter: &mut FnMut(&Path) -> bool) -> CargoResult> { let mut ret = Vec::new(); - try!(PathSource::walk(pkg.root(), &mut ret, true, filter)); + PathSource::walk(pkg.root(), &mut ret, true, filter)?; Ok(ret) } @@ -284,8 +292,8 @@ impl<'cfg> PathSource<'cfg> { if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() { return Ok(()) } - for dir in try!(fs::read_dir(path)) { - let dir = try!(dir).path(); + for dir in fs::read_dir(path)? 
{ + let dir = dir?.path(); let name = dir.file_name().and_then(|s| s.to_str()); // Skip dotfile directories if name.map(|s| s.starts_with('.')) == Some(true) { @@ -297,7 +305,7 @@ impl<'cfg> PathSource<'cfg> { _ => {} } } - try!(PathSource::walk(&dir, ret, false, filter)); + PathSource::walk(&dir, ret, false, filter)?; } Ok(()) } @@ -318,7 +326,7 @@ impl<'cfg> Registry for PathSource<'cfg> { impl<'cfg> Source for PathSource<'cfg> { fn update(&mut self) -> CargoResult<()> { if !self.updated { - let packages = try!(self.read_packages()); + let packages = self.read_packages()?; self.packages.extend(packages.into_iter()); self.updated = true; } @@ -342,7 +350,7 @@ impl<'cfg> Source for PathSource<'cfg> { let mut max = FileTime::zero(); let mut max_path = PathBuf::from(""); - for file in try!(self.list_files(pkg)) { + for file in self.list_files(pkg)? { // An fs::stat error here is either because path is a // broken symlink, a permissions error, or a race // condition where this path was rm'ed - either way, diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs index cda824676d4..86c02802942 100644 --- a/src/cargo/sources/registry/index.rs +++ b/src/cargo/sources/registry/index.rs @@ -41,7 +41,7 @@ impl<'cfg> RegistryIndex<'cfg> { return Ok(s.clone()) } // Ok, we're missing the key, so parse the index file to load it. - try!(self.summaries(pkg.name())); + self.summaries(pkg.name())?; self.hashes.get(&key).chain_error(|| { internal(format!("no hash listed for {}", pkg)) }).map(|s| s.clone()) @@ -55,7 +55,7 @@ impl<'cfg> RegistryIndex<'cfg> { if self.cache.contains_key(name) { return Ok(self.cache.get(name).unwrap()); } - let summaries = try!(self.load_summaries(name)); + let summaries = self.load_summaries(name)?; let summaries = summaries.into_iter().filter(|summary| { summary.0.package_id().name() == name }).collect(); @@ -94,7 +94,7 @@ impl<'cfg> RegistryIndex<'cfg> { match File::open(&path) { Ok(mut f) => { let mut contents = String::new(); - try!(f.read_to_string(&mut contents)); + f.read_to_string(&mut contents)?; let ret: CargoResult>; ret = contents.lines().filter(|l| l.trim().len() > 0) .map(|l| self.parse_registry_package(l)) @@ -116,13 +116,13 @@ impl<'cfg> RegistryIndex<'cfg> { -> CargoResult<(Summary, bool)> { let RegistryPackage { name, vers, cksum, deps, features, yanked - } = try!(json::decode::(line)); - let pkgid = try!(PackageId::new(&name, &vers, &self.source_id)); + } = json::decode::(line)?; + let pkgid = PackageId::new(&name, &vers, &self.source_id)?; let deps: CargoResult> = deps.into_iter().map(|dep| { self.parse_registry_dependency(dep) }).collect(); - let deps = try!(deps); - let summary = try!(Summary::new(pkgid, deps, features)); + let deps = deps?; + let summary = Summary::new(pkgid, deps, features)?; let summary = summary.set_checksum(cksum.clone()); self.hashes.insert((name, vers), cksum); Ok((summary, yanked.unwrap_or(false))) @@ -135,7 +135,7 @@ impl<'cfg> RegistryIndex<'cfg> { name, req, features, optional, default_features, target, kind } = dep; - let dep = try!(DependencyInner::parse(&name, Some(&req), &self.source_id, None)); + let dep = DependencyInner::parse(&name, Some(&req), &self.source_id, None)?; let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { "dev" => Kind::Development, "build" => Kind::Build, @@ -143,7 +143,7 @@ impl<'cfg> RegistryIndex<'cfg> { }; let platform = match target { - Some(target) => Some(try!(target.parse())), + Some(target) => Some(target.parse()?), None => None, }; @@ -166,7 +166,7 @@ 
impl<'cfg> RegistryIndex<'cfg> { impl<'cfg> Registry for RegistryIndex<'cfg> { fn query(&mut self, dep: &Dependency) -> CargoResult> { let mut summaries = { - let summaries = try!(self.summaries(dep.name())); + let summaries = self.summaries(dep.name())?; summaries.iter().filter(|&&(_, yanked)| { dep.source_id().precise().is_some() || !yanked }).map(|s| s.0.clone()).collect::>() diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs index 46387bb6841..67e88a9c72d 100644 --- a/src/cargo/sources/registry/local.rs +++ b/src/cargo/sources/registry/local.rs @@ -60,9 +60,9 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> { fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult { let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); - let mut crate_file = try!(self.root.open_ro(&crate_file, - self.config, - "crate file")); + let mut crate_file = self.root.open_ro(&crate_file, + self.config, + "crate file")?; // If we've already got an unpacked version of this crate, then skip the // checksum below as it is in theory already verified. @@ -71,16 +71,16 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> { return Ok(crate_file) } - try!(self.config.shell().status("Unpacking", pkg)); + self.config.shell().status("Unpacking", pkg)?; // We don't actually need to download anything per-se, we just need to // verify the checksum matches the .crate file itself. let mut state = Sha256::new(); let mut buf = [0; 64 * 1024]; loop { - let n = try!(crate_file.read(&mut buf).chain_error(|| { + let n = crate_file.read(&mut buf).chain_error(|| { human(format!("failed to read `{}`", crate_file.path().display())) - })); + })?; if n == 0 { break } @@ -90,7 +90,7 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> { bail!("failed to verify the checksum of `{}`", pkg) } - try!(crate_file.seek(SeekFrom::Start(0))); + crate_file.seek(SeekFrom::Start(0))?; Ok(crate_file) } diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs index 13517fc08d5..dd5056be980 100644 --- a/src/cargo/sources/registry/mod.rs +++ b/src/cargo/sources/registry/mod.rs @@ -288,7 +288,7 @@ impl<'cfg> RegistrySource<'cfg> { -> CargoResult { let dst = self.src_path.join(&format!("{}-{}", pkg.name(), pkg.version())); - try!(dst.create_dir()); + dst.create_dir()?; // Note that we've already got the `tarball` locked above, and that // implies a lock on the unpacked destination as well, so this access // via `into_path_unlocked` should be ok. @@ -298,15 +298,15 @@ impl<'cfg> RegistrySource<'cfg> { return Ok(dst) } - let gz = try!(GzDecoder::new(tarball.file())); + let gz = GzDecoder::new(tarball.file())?; let mut tar = Archive::new(gz); - try!(tar.unpack(dst.parent().unwrap())); - try!(File::create(&ok)); + tar.unpack(dst.parent().unwrap())?; + File::create(&ok)?; Ok(dst) } fn do_update(&mut self) -> CargoResult<()> { - try!(self.ops.update_index()); + self.ops.update_index()?; let path = self.ops.index_path(); self.index = index::RegistryIndex::new(&self.source_id, path, @@ -323,8 +323,8 @@ impl<'cfg> Registry for RegistrySource<'cfg> { // come back with no summaries, then our registry may need to be // updated, so we fall back to performing a lazy update. 
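The lazy update below only reaches for the network when a locked ("precise") dependency cannot be found in the index already on disk. A rough, dependency-free sketch of that idea (names and signature are hypothetical, not Cargo's `Registry` trait):

    // Query once; if the locked dependency is missing and the index has
    // not been refreshed yet, refresh it and query a second time.
    fn query_with_lazy_update<T>(precise: bool,
                                 already_updated: bool,
                                 mut query: impl FnMut() -> Vec<T>,
                                 mut update_index: impl FnMut()) -> Vec<T> {
        let found = query();
        if precise && !already_updated && found.is_empty() {
            update_index();
            return query();
        }
        found
    }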
if dep.source_id().precise().is_some() && !self.updated { - if try!(self.index.query(dep)).is_empty() { - try!(self.do_update()); + if self.index.query(dep)?.is_empty() { + self.do_update()?; } } @@ -346,26 +346,26 @@ impl<'cfg> Source for RegistrySource<'cfg> { // `Some("locked")` as other `Some` values indicate a `cargo update // --precise` request if self.source_id.precise() != Some("locked") { - try!(self.do_update()); + self.do_update()?; } Ok(()) } fn download(&mut self, package: &PackageId) -> CargoResult { - let hash = try!(self.index.hash(package)); - let path = try!(self.ops.download(package, &hash)); - let path = try!(self.unpack_package(package, &path).chain_error(|| { + let hash = self.index.hash(package)?; + let path = self.ops.download(package, &hash)?; + let path = self.unpack_package(package, &path).chain_error(|| { internal(format!("failed to unpack package `{}`", package)) - })); + })?; let mut src = PathSource::new(&path, &self.source_id, self.config); - try!(src.update()); - let pkg = try!(src.download(package)); + src.update()?; + let pkg = src.download(package)?; // Unfortunately the index and the actual Cargo.toml in the index can // differ due to historical Cargo bugs. To paper over these we trash the // *summary* loaded from the Cargo.toml we just downloaded with the one // we loaded from the index. - let summaries = try!(self.index.summaries(package.name())); + let summaries = self.index.summaries(package.name())?; let summary = summaries.iter().map(|s| &s.0).find(|s| { s.package_id() == package }).expect("summary not found"); diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs index d470618b544..480b7185950 100644 --- a/src/cargo/sources/registry/remote.rs +++ b/src/cargo/sources/registry/remote.rs @@ -44,12 +44,12 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { } fn config(&self) -> CargoResult> { - let lock = try!(self.index_path.open_ro(Path::new(INDEX_LOCK), - self.config, - "the registry index")); + let lock = self.index_path.open_ro(Path::new(INDEX_LOCK), + self.config, + "the registry index")?; let path = lock.path().parent().unwrap(); - let contents = try!(paths::read(&path.join("config.json"))); - let config = try!(json::decode(&contents)); + let contents = paths::read(&path.join("config.json"))?; + let config = json::decode(&contents)?; Ok(Some(config)) } @@ -60,29 +60,29 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { // // This way if there's a problem the error gets printed before we even // hit the index, which may not actually read this configuration. 
- try!(ops::http_handle(self.config)); + ops::http_handle(self.config)?; // Then we actually update the index - try!(self.index_path.create_dir()); - let lock = try!(self.index_path.open_rw(Path::new(INDEX_LOCK), - self.config, - "the registry index")); + self.index_path.create_dir()?; + let lock = self.index_path.open_rw(Path::new(INDEX_LOCK), + self.config, + "the registry index")?; let path = lock.path().parent().unwrap(); - try!(self.config.shell().status("Updating", - format!("registry `{}`", self.source_id.url()))); + self.config.shell().status("Updating", + format!("registry `{}`", self.source_id.url()))?; - let repo = try!(git2::Repository::open(path).or_else(|_| { + let repo = git2::Repository::open(path).or_else(|_| { let _ = lock.remove_siblings(); git2::Repository::init(path) - })); + })?; if self.source_id.url().host_str() == Some("github.com") { if let Ok(oid) = repo.refname_to_id("refs/heads/master") { let handle = match self.handle { Some(ref mut handle) => handle, None => { - self.handle = Some(try!(ops::http_handle(self.config))); + self.handle = Some(ops::http_handle(self.config)?); self.handle.as_mut().unwrap() } }; @@ -99,16 +99,16 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { let url = self.source_id.url().to_string(); let refspec = "refs/heads/*:refs/remotes/origin/*"; - try!(git::fetch(&repo, &url, refspec, &self.config).chain_error(|| { + git::fetch(&repo, &url, refspec, &self.config).chain_error(|| { human(format!("failed to fetch `{}`", url)) - })); + })?; // git reset --hard origin/master let reference = "refs/remotes/origin/master"; - let oid = try!(repo.refname_to_id(reference)); + let oid = repo.refname_to_id(reference)?; trace!("[{}] updating to rev {}", self.source_id, oid); - let object = try!(repo.find_object(oid, None)); - try!(repo.reset(&object, git2::ResetType::Hard, None)); + let object = repo.find_object(oid, None)?; + repo.reset(&object, git2::ResetType::Hard, None)?; Ok(()) } @@ -116,15 +116,28 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { -> CargoResult { let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); let path = Path::new(&filename); - let mut dst = try!(self.cache_path.open_rw(path, self.config, &filename)); - let meta = try!(dst.file().metadata()); + + // Attempt to open a read-only copy first to avoid an exclusive write + // lock and also work with read-only filesystems. Note that we check the + // length of the file like below to handle interrupted downloads. + // + // If this fails then we fall through to the exclusive path where we may + // have to redownload the file.
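The fallback described by the new comment (read-only first, exclusive read-write only when the cache is cold) can be expressed with plain std APIs. This is a simplified, hypothetical helper, not Cargo's `Filesystem`/`FileLock` machinery, and it ignores locking entirely:

    use std::fs::{File, OpenOptions};
    use std::io;
    use std::path::Path;

    // Prefer a read-only handle when a non-empty cached file already exists;
    // otherwise fall back to a writable handle so the caller can (re)download
    // into it. A zero-length file is treated as an interrupted download.
    fn open_cached(path: &Path) -> io::Result<File> {
        if let Ok(f) = File::open(path) {
            if f.metadata()?.len() > 0 {
                return Ok(f);
            }
        }
        OpenOptions::new().read(true).write(true).create(true).open(path)
    }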
+ if let Ok(dst) = self.cache_path.open_ro(path, self.config, &filename) { + let meta = dst.file().metadata()?; + if meta.len() > 0 { + return Ok(dst) + } + } + let mut dst = self.cache_path.open_rw(path, self.config, &filename)?; + let meta = dst.file().metadata()?; if meta.len() > 0 { return Ok(dst) } - try!(self.config.shell().status("Downloading", pkg)); + self.config.shell().status("Downloading", pkg)?; - let config = try!(self.config()).unwrap(); - let mut url = try!(config.dl.to_url()); + let config = self.config()?.unwrap(); + let mut url = config.dl.to_url()?; url.path_segments_mut().unwrap() .push(pkg.name()) .push(&pkg.version().to_string()) @@ -133,30 +146,30 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { let handle = match self.handle { Some(ref mut handle) => handle, None => { - self.handle = Some(try!(ops::http_handle(self.config))); + self.handle = Some(ops::http_handle(self.config)?); self.handle.as_mut().unwrap() } }; // TODO: don't download into memory, but ensure that if we ctrl-c a // download we should resume either from the start or the middle // on the next time - try!(handle.get(true)); - try!(handle.url(&url.to_string())); - try!(handle.follow_location(true)); + handle.get(true)?; + handle.url(&url.to_string())?; + handle.follow_location(true)?; let mut state = Sha256::new(); let mut body = Vec::new(); { let mut handle = handle.transfer(); - try!(handle.write_function(|buf| { + handle.write_function(|buf| { state.update(buf); body.extend_from_slice(buf); Ok(buf.len()) - })); - try!(network::with_retry(self.config, || { + })?; + network::with_retry(self.config, || { handle.perform() - })) + })? } - let code = try!(handle.response_code()); + let code = handle.response_code()?; if code != 200 && code != 0 { bail!("failed to get 200 response from `{}`, got {}", url, code) } @@ -166,8 +179,8 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { bail!("failed to verify the checksum of `{}`", pkg) } - try!(dst.write_all(&body)); - try!(dst.seek(SeekFrom::Start(0))); + dst.write_all(&body)?; + dst.seek(SeekFrom::Start(0))?; Ok(dst) } } diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs index 7fb95bdf6c8..e164a1c7cf9 100644 --- a/src/cargo/sources/replaced.rs +++ b/src/cargo/sources/replaced.rs @@ -22,10 +22,10 @@ impl<'cfg> ReplacedSource<'cfg> { impl<'cfg> Registry for ReplacedSource<'cfg> { fn query(&mut self, dep: &Dependency) -> CargoResult> { let dep = dep.clone().map_source(&self.to_replace, &self.replace_with); - let ret = try!(self.inner.query(&dep).chain_error(|| { + let ret = self.inner.query(&dep).chain_error(|| { human(format!("failed to query replaced source `{}`", self.to_replace)) - })); + })?; Ok(ret.into_iter().map(|summary| { summary.map_source(&self.replace_with, &self.to_replace) }).collect()) @@ -42,10 +42,10 @@ impl<'cfg> Source for ReplacedSource<'cfg> { fn download(&mut self, id: &PackageId) -> CargoResult { let id = id.with_source_id(&self.replace_with); - let pkg = try!(self.inner.download(&id).chain_error(|| { + let pkg = self.inner.download(&id).chain_error(|| { human(format!("failed to download replaced source `{}`", self.to_replace)) - })); + })?; Ok(pkg.map_source(&self.replace_with, &self.to_replace)) } diff --git a/src/cargo/util/cfg.rs b/src/cargo/util/cfg.rs index fbdfb919eed..bd89586bc38 100644 --- a/src/cargo/util/cfg.rs +++ b/src/cargo/util/cfg.rs @@ -42,7 +42,7 @@ impl FromStr for Cfg { fn from_str(s: &str) -> CargoResult { let mut p = Parser::new(s); - let e = try!(p.cfg()); + let e = p.cfg()?; if 
p.t.next().is_some() { bail!("malformed cfg value or key/value pair") } @@ -75,7 +75,7 @@ impl FromStr for CfgExpr { fn from_str(s: &str) -> CargoResult { let mut p = Parser::new(s); - let e = try!(p.expr()); + let e = p.expr()?; if p.t.next().is_some() { bail!("can only have one cfg-expression, consider using all() or \ any() explicitly") @@ -101,9 +101,9 @@ impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for (i, v) in self.0.iter().enumerate() { if i > 0 { - try!(write!(f, ", ")); + write!(f, ", ")?; } - try!(write!(f, "{}", v)); + write!(f, "{}", v)?; } Ok(()) } @@ -125,11 +125,11 @@ impl<'a> Parser<'a> { Some(&Ok(Token::Ident(op @ "any"))) => { self.t.next(); let mut e = Vec::new(); - try!(self.eat(Token::LeftParen)); + self.eat(Token::LeftParen)?; while !self.try(Token::RightParen) { - e.push(try!(self.expr())); + e.push(self.expr()?); if !self.try(Token::Comma) { - try!(self.eat(Token::RightParen)); + self.eat(Token::RightParen)?; break } } @@ -141,9 +141,9 @@ impl<'a> Parser<'a> { } Some(&Ok(Token::Ident("not"))) => { self.t.next(); - try!(self.eat(Token::LeftParen)); - let e = try!(self.expr()); - try!(self.eat(Token::RightParen)); + self.eat(Token::LeftParen)?; + let e = self.expr()?; + self.eat(Token::RightParen)?; Ok(CfgExpr::Not(Box::new(e))) } Some(&Ok(..)) => self.cfg().map(CfgExpr::Value), diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs index e9988fe9496..dc97acc0aac 100644 --- a/src/cargo/util/config.rs +++ b/src/cargo/util/config.rs @@ -53,13 +53,13 @@ impl Config { pub fn default() -> CargoResult { let shell = ::shell(Verbosity::Verbose, ColorConfig::Auto); - let cwd = try!(env::current_dir().chain_error(|| { + let cwd = env::current_dir().chain_error(|| { human("couldn't get the current directory of the process") - })); - let homedir = try!(homedir(&cwd).chain_error(|| { + })?; + let homedir = homedir(&cwd).chain_error(|| { human("Cargo couldn't find your home directory. \ This probably means that $HOME was not set.") - })); + })?; Ok(Config::new(shell, cwd, homedir)) } @@ -90,7 +90,7 @@ impl Config { } pub fn rustc(&self) -> CargoResult<&Rustc> { - self.rustc.get_or_try_init(|| Rustc::new(try!(self.get_tool("rustc")))) + self.rustc.get_or_try_init(|| Rustc::new(self.get_tool("rustc")?)) } pub fn values(&self) -> CargoResult<&HashMap> { @@ -102,7 +102,7 @@ impl Config { pub fn target_dir(&self) -> CargoResult> { if let Some(dir) = env::var_os("CARGO_TARGET_DIR") { Ok(Some(Filesystem::new(self.cwd.join(dir)))) - } else if let Some(val) = try!(self.get_path("build.target-dir")) { + } else if let Some(val) = self.get_path("build.target-dir")? { let val = self.cwd.join(val.val); Ok(Some(Filesystem::new(val))) } else { @@ -111,7 +111,7 @@ impl Config { } fn get(&self, key: &str) -> CargoResult> { - let vals = try!(self.values()); + let vals = self.values()?; let mut parts = key.split('.').enumerate(); let mut val = match vals.get(parts.next().unwrap().1) { Some(val) => val, @@ -152,7 +152,7 @@ impl Config { match env::var(&format!("CARGO_{}", key)) { Ok(value) => { Ok(Some(Value { - val: try!(value.parse()), + val: value.parse()?, definition: Definition::Environment, })) } @@ -161,10 +161,10 @@ impl Config { } pub fn get_string(&self, key: &str) -> CargoResult>> { - if let Some(v) = try!(self.get_env(key)) { + if let Some(v) = self.get_env(key)? { return Ok(Some(v)) } - match try!(self.get(key)) { + match self.get(key)? 
{ Some(CV::String(i, path)) => { Ok(Some(Value { val: i, @@ -177,10 +177,10 @@ impl Config { } pub fn get_bool(&self, key: &str) -> CargoResult>> { - if let Some(v) = try!(self.get_env(key)) { + if let Some(v) = self.get_env(key)? { return Ok(Some(v)) } - match try!(self.get(key)) { + match self.get(key)? { Some(CV::Boolean(b, path)) => { Ok(Some(Value { val: b, @@ -193,7 +193,7 @@ impl Config { } pub fn get_path(&self, key: &str) -> CargoResult>> { - if let Some(val) = try!(self.get_string(&key)) { + if let Some(val) = self.get_string(&key)? { let is_path = val.val.contains('/') || (cfg!(windows) && val.val.contains('\\')); let path = if is_path { @@ -213,7 +213,7 @@ impl Config { pub fn get_list(&self, key: &str) -> CargoResult>>> { - match try!(self.get(key)) { + match self.get(key)? { Some(CV::List(i, path)) => { Ok(Some(Value { val: i, @@ -227,7 +227,7 @@ impl Config { pub fn get_table(&self, key: &str) -> CargoResult>>> { - match try!(self.get(key)) { + match self.get(key)? { Some(CV::Table(i, path)) => { Ok(Some(Value { val: i, @@ -240,10 +240,10 @@ impl Config { } pub fn get_i64(&self, key: &str) -> CargoResult>> { - if let Some(v) = try!(self.get_env(key)) { + if let Some(v) = self.get_env(key)? { return Ok(Some(v)) } - match try!(self.get(key)) { + match self.get(key)? { Some(CV::Integer(i, path)) => { Ok(Some(Value { val: i, @@ -256,7 +256,7 @@ impl Config { } pub fn net_retry(&self) -> CargoResult { - match try!(self.get_i64("net.retry")) { + match self.get_i64("net.retry")? { Some(v) => { let value = v.val; if value < 0 { @@ -316,7 +316,7 @@ impl Config { }; self.shell().set_verbosity(verbosity); - try!(self.shell().set_color_config(color.map(|s| &s[..]))); + self.shell().set_color_config(color.map(|s| &s[..]))?; self.extra_verbose.set(extra_verbose); self.frozen.set(frozen); self.locked.set(locked); @@ -339,23 +339,23 @@ impl Config { fn load_values(&self) -> CargoResult> { let mut cfg = CV::Table(HashMap::new(), PathBuf::from(".")); - try!(walk_tree(&self.cwd, |mut file, path| { + walk_tree(&self.cwd, |mut file, path| { let mut contents = String::new(); - try!(file.read_to_string(&mut contents)); - let table = try!(cargo_toml::parse(&contents, + file.read_to_string(&mut contents)?; + let table = cargo_toml::parse(&contents, &path, self).chain_error(|| { human(format!("could not parse TOML configuration in `{}`", path.display())) - })); + })?; let toml = toml::Value::Table(table); - let value = try!(CV::from_toml(&path, toml).chain_error(|| { + let value = CV::from_toml(&path, toml).chain_error(|| { human(format!("failed to load TOML configuration from `{}`", path.display())) - })); - try!(cfg.merge(value)); + })?; + cfg.merge(value)?; Ok(()) - }).chain_error(|| human("Couldn't load Cargo configuration"))); + }).chain_error(|| human("Couldn't load Cargo configuration"))?; match cfg { @@ -371,7 +371,7 @@ impl Config { } let var = format!("build.{}", tool); - if let Some(tool_path) = try!(self.get_path(&var)) { + if let Some(tool_path) = self.get_path(&var)? 
{ return Ok(tool_path.val); } @@ -414,10 +414,10 @@ impl fmt::Debug for ConfigValue { CV::String(ref s, ref path) => write!(f, "{} (from {})", s, path.display()), CV::List(ref list, ref path) => { - try!(write!(f, "[")); + write!(f, "[")?; for (i, &(ref s, ref path)) in list.iter().enumerate() { - if i > 0 { try!(write!(f, ", ")); } - try!(write!(f, "{} (from {})", s, path.display())); + if i > 0 { write!(f, ", ")?; } + write!(f, "{} (from {})", s, path.display())?; } write!(f, "] (from {})", path.display()) } @@ -448,21 +448,21 @@ impl ConfigValue { toml::Value::Boolean(b) => Ok(CV::Boolean(b, path.to_path_buf())), toml::Value::Integer(i) => Ok(CV::Integer(i, path.to_path_buf())), toml::Value::Array(val) => { - Ok(CV::List(try!(val.into_iter().map(|toml| { + Ok(CV::List(val.into_iter().map(|toml| { match toml { toml::Value::String(val) => Ok((val, path.to_path_buf())), v => Err(human(format!("expected string but found {} \ in list", v.type_str()))), } - }).collect::>()), path.to_path_buf())) + }).collect::>()?, path.to_path_buf())) } toml::Value::Table(val) => { - Ok(CV::Table(try!(val.into_iter().map(|(key, value)| { - let value = try!(CV::from_toml(path, value).chain_error(|| { + Ok(CV::Table(val.into_iter().map(|(key, value)| { + let value = CV::from_toml(path, value).chain_error(|| { human(format!("failed to parse key `{}`", key)) - })); + })?; Ok((key, value)) - }).collect::>()), path.to_path_buf())) + }).collect::>()?, path.to_path_buf())) } v => bail!("found TOML configuration value of unknown type `{}`", v.type_str()), @@ -485,7 +485,7 @@ impl ConfigValue { Occupied(mut entry) => { let path = value.definition_path().to_path_buf(); let entry = entry.get_mut(); - try!(entry.merge(value).chain_error(|| { + entry.merge(value).chain_error(|| { human(format!("failed to merge key `{}` between \ files:\n \ file 1: {}\n \ @@ -494,7 +494,7 @@ impl ConfigValue { entry.definition_path().display(), path.display())) - })); + })?; } Vacant(entry) => { entry.insert(value); } }; @@ -664,9 +664,9 @@ fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> loop { let possible = current.join(".cargo").join("config"); if fs::metadata(&possible).is_ok() { - let file = try!(File::open(&possible)); + let file = File::open(&possible)?; - try!(walk(file, &possible)); + walk(file, &possible)?; stash.insert(possible); } @@ -680,14 +680,14 @@ fn walk_tree(pwd: &Path, mut walk: F) -> CargoResult<()> // Once we're done, also be sure to walk the home directory even if it's not // in our history to be sure we pick up that standard location for // information. - let home = try!(homedir(pwd).chain_error(|| { + let home = homedir(pwd).chain_error(|| { human("Cargo couldn't find your home directory. \ This probably means that $HOME was not set.") - })); + })?; let config = home.join("config"); if !stash.contains(&config) && fs::metadata(&config).is_ok() { - let file = try!(File::open(&config)); - try!(walk(file, &config)); + let file = File::open(&config)?; + walk(file, &config)?; } Ok(()) @@ -704,20 +704,20 @@ pub fn set_config(cfg: &Config, // 3. This blows away the previous ordering of a file. let mut file = match loc { Location::Global => { - try!(cfg.home_path.create_dir()); - try!(cfg.home_path.open_rw(Path::new("config"), cfg, - "the global config file")) + cfg.home_path.create_dir()?; + cfg.home_path.open_rw(Path::new("config"), cfg, + "the global config file")? 
} Location::Project => unimplemented!(), }; let mut contents = String::new(); let _ = file.read_to_string(&mut contents); - let mut toml = try!(cargo_toml::parse(&contents, file.path(), cfg)); + let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?; toml.insert(key.to_string(), value.into_toml()); let contents = toml::Value::Table(toml).to_string(); - try!(file.seek(SeekFrom::Start(0))); - try!(file.write_all(contents.as_bytes())); - try!(file.file().set_len(contents.len() as u64)); + file.seek(SeekFrom::Start(0))?; + file.write_all(contents.as_bytes())?; + file.file().set_len(contents.len() as u64)?; Ok(()) } diff --git a/src/cargo/util/errors.rs b/src/cargo/util/errors.rs index 0f1bfe9efb4..853b8bef9d5 100644 --- a/src/cargo/util/errors.rs +++ b/src/cargo/util/errors.rs @@ -190,9 +190,9 @@ struct ConcreteCargoError { impl fmt::Display for ConcreteCargoError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "{}", self.description)); + write!(f, "{}", self.description)?; if let Some(ref s) = self.detail { - try!(write!(f, " ({})", s)); + write!(f, " ({})", s)?; } Ok(()) } diff --git a/src/cargo/util/flock.rs b/src/cargo/util/flock.rs index f90fa596f28..a20540d06c3 100644 --- a/src/cargo/util/flock.rs +++ b/src/cargo/util/flock.rs @@ -50,16 +50,16 @@ impl FileLock { /// needs to be cleared out as it may be corrupt. pub fn remove_siblings(&self) -> io::Result<()> { let path = self.path(); - for entry in try!(path.parent().unwrap().read_dir()) { - let entry = try!(entry); + for entry in path.parent().unwrap().read_dir()? { + let entry = entry?; if Some(&entry.file_name()[..]) == path.file_name() { continue } - let kind = try!(entry.file_type()); + let kind = entry.file_type()?; if kind.is_dir() { - try!(fs::remove_dir_all(entry.path())); + fs::remove_dir_all(entry.path())?; } else { - try!(fs::remove_file(entry.path())); + fs::remove_file(entry.path())?; } } Ok(()) @@ -204,26 +204,26 @@ impl Filesystem { // If we want an exclusive lock then if we fail because of NotFound it's // likely because an intermediate directory didn't exist, so try to // create the directory and then continue. 
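The comment above (in `flock.rs`) describes a retry-on-NotFound pattern that is useful beyond Cargo: if an exclusive open fails because an intermediate directory is missing, create the directories and try once more. A minimal std-only sketch, without the locking and error chaining the real code adds (the helper name is hypothetical):

    use std::fs::{self, File, OpenOptions};
    use std::io;
    use std::path::Path;

    // Open for read-write access, creating any missing parent directories
    // on the first NotFound failure and retrying the open once.
    fn open_rw_creating_parents(path: &Path) -> io::Result<File> {
        let mut opts = OpenOptions::new();
        opts.read(true).write(true).create(true);
        opts.open(path).or_else(|e| {
            if e.kind() == io::ErrorKind::NotFound {
                if let Some(parent) = path.parent() {
                    fs::create_dir_all(parent)?;
                }
                opts.open(path)
            } else {
                Err(e)
            }
        })
    }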
- let f = try!(opts.open(&path).or_else(|e| { + let f = opts.open(&path).or_else(|e| { if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive { - try!(create_dir_all(path.parent().unwrap())); + create_dir_all(path.parent().unwrap())?; opts.open(&path) } else { Err(e) } }).chain_error(|| { human(format!("failed to open: {}", path.display())) - })); + })?; match state { State::Exclusive => { - try!(acquire(config, msg, &path, - &|| f.try_lock_exclusive(), - &|| f.lock_exclusive())); + acquire(config, msg, &path, + &|| f.try_lock_exclusive(), + &|| f.lock_exclusive())?; } State::Shared => { - try!(acquire(config, msg, &path, - &|| f.try_lock_shared(), - &|| f.lock_shared())); + acquire(config, msg, &path, + &|| f.try_lock_shared(), + &|| f.lock_shared())?; } State::Unlocked => {} @@ -285,7 +285,7 @@ fn acquire(config: &Config, } } let msg = format!("waiting for file lock on {}", msg); - try!(config.shell().err().say_status("Blocking", &msg, CYAN, true)); + config.shell().err().say_status("Blocking", &msg, CYAN, true)?; return block().chain_error(|| { human(format!("failed to lock file: {}", path.display())) diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs index cc0414f6188..6543c8f9179 100644 --- a/src/cargo/util/graph.rs +++ b/src/cargo/util/graph.rs @@ -69,17 +69,17 @@ impl Graph { impl fmt::Debug for Graph { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - try!(writeln!(fmt, "Graph {{")); + writeln!(fmt, "Graph {{")?; for (n, e) in self.nodes.iter() { - try!(writeln!(fmt, " - {}", n)); + writeln!(fmt, " - {}", n)?; for n in e.iter() { - try!(writeln!(fmt, " - {}", n)); + writeln!(fmt, " - {}", n)?; } } - try!(write!(fmt, "}}")); + write!(fmt, "}}")?; Ok(()) } diff --git a/src/cargo/util/lazy_cell.rs b/src/cargo/util/lazy_cell.rs index 3cdaa641692..fc751dc3cf2 100644 --- a/src/cargo/util/lazy_cell.rs +++ b/src/cargo/util/lazy_cell.rs @@ -58,7 +58,7 @@ impl LazyCell { where F: FnOnce() -> Result { if self.borrow().is_none() { - if let Err(_) = self.fill(try!(init())) { + if let Err(_) = self.fill(init()?) 
{ unreachable!(); } } diff --git a/src/cargo/util/network.rs b/src/cargo/util/network.rs index a53d04d2841..4bc12b7b4e6 100644 --- a/src/cargo/util/network.rs +++ b/src/cargo/util/network.rs @@ -14,14 +14,14 @@ pub fn with_retry(config: &Config, mut callback: F) -> CargoResult where F: FnMut() -> Result, E: errors::NetworkError { - let mut remaining = try!(config.net_retry()); + let mut remaining = config.net_retry()?; loop { match callback() { Ok(ret) => return Ok(ret), Err(ref e) if e.maybe_spurious() && remaining > 0 => { let msg = format!("spurious network error ({} tries \ remaining): {}", remaining, e); - try!(config.shell().warn(msg)); + config.shell().warn(msg)?; remaining -= 1; } Err(e) => return Err(Box::new(e)), diff --git a/src/cargo/util/paths.rs b/src/cargo/util/paths.rs index 8444c3fd209..d47598a2ece 100644 --- a/src/cargo/util/paths.rs +++ b/src/cargo/util/paths.rs @@ -70,8 +70,8 @@ pub fn without_prefix<'a>(a: &'a Path, b: &'a Path) -> Option<&'a Path> { pub fn read(path: &Path) -> CargoResult { (|| -> CargoResult<_> { let mut ret = String::new(); - let mut f = try!(File::open(path)); - try!(f.read_to_string(&mut ret)); + let mut f = File::open(path)?; + f.read_to_string(&mut ret)?; Ok(ret) })().map_err(human).chain_error(|| { human(format!("failed to read `{}`", path.display())) @@ -81,8 +81,8 @@ pub fn read(path: &Path) -> CargoResult { pub fn read_bytes(path: &Path) -> CargoResult> { (|| -> CargoResult<_> { let mut ret = Vec::new(); - let mut f = try!(File::open(path)); - try!(f.read_to_end(&mut ret)); + let mut f = File::open(path)?; + f.read_to_end(&mut ret)?; Ok(ret) })().map_err(human).chain_error(|| { human(format!("failed to read `{}`", path.display())) @@ -91,8 +91,8 @@ pub fn read_bytes(path: &Path) -> CargoResult> { pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> { (|| -> CargoResult<()> { - let mut f = try!(File::create(path)); - try!(f.write_all(contents)); + let mut f = File::create(path)?; + f.write_all(contents)?; Ok(()) })().map_err(human).chain_error(|| { human(format!("failed to write `{}`", path.display())) @@ -101,13 +101,13 @@ pub fn write(path: &Path, contents: &[u8]) -> CargoResult<()> { pub fn append(path: &Path, contents: &[u8]) -> CargoResult<()> { (|| -> CargoResult<()> { - let mut f = try!(OpenOptions::new() - .write(true) - .append(true) - .create(true) - .open(path)); + let mut f = OpenOptions::new() + .write(true) + .append(true) + .create(true) + .open(path)?; - try!(f.write_all(contents)); + f.write_all(contents)?; Ok(()) }).chain_error(|| { internal(format!("failed to write `{}`", path.display())) diff --git a/src/cargo/util/process_builder.rs b/src/cargo/util/process_builder.rs index b66b26ec0a4..ed92321896e 100644 --- a/src/cargo/util/process_builder.rs +++ b/src/cargo/util/process_builder.rs @@ -18,10 +18,10 @@ pub struct ProcessBuilder { impl fmt::Display for ProcessBuilder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - try!(write!(f, "`{}", self.program.to_string_lossy())); + write!(f, "`{}", self.program.to_string_lossy())?; for arg in self.args.iter() { - try!(write!(f, " {}", escape(arg.to_string_lossy()))); + write!(f, " {}", escape(arg.to_string_lossy()))?; } write!(f, "`") @@ -74,11 +74,11 @@ impl ProcessBuilder { pub fn exec(&self) -> Result<(), ProcessError> { let mut command = self.build_command(); - let exit = try!(command.status().map_err(|e| { + let exit = command.status().map_err(|e| { process_error(&format!("could not execute process `{}`", self.debug_string()), Some(Box::new(e)), None, None) 
- })); + })?; if exit.success() { Ok(()) @@ -108,11 +108,11 @@ impl ProcessBuilder { pub fn exec_with_output(&self) -> Result { let mut command = self.build_command(); - let output = try!(command.output().map_err(|e| { + let output = command.output().map_err(|e| { process_error(&format!("could not execute process `{}`", self.debug_string()), Some(Box::new(e)), None, None) - })); + })?; if output.status.success() { Ok(output) @@ -136,11 +136,11 @@ impl ProcessBuilder { .stdin(Stdio::null()); let mut callback_error = None; - let status = try!((|| { - let mut child = try!(cmd.spawn()); + let status = (|| { + let mut child = cmd.spawn()?; let out = child.stdout.take().unwrap(); let err = child.stderr.take().unwrap(); - try!(read2(out, err, &mut |is_out, data, eof| { + read2(out, err, &mut |is_out, data, eof| { let idx = if eof { data.len() } else { @@ -164,13 +164,13 @@ impl ProcessBuilder { callback_error = Some(e); } } - })); + })?; child.wait() })().map_err(|e| { process_error(&format!("could not execute process `{}`", self.debug_string()), Some(Box::new(e)), None, None) - })); + })?; let output = Output { stdout: stdout, stderr: stderr, diff --git a/src/cargo/util/read2.rs b/src/cargo/util/read2.rs index 4596b260a13..7eac02783f8 100644 --- a/src/cargo/util/read2.rs +++ b/src/cargo/util/read2.rs @@ -62,11 +62,11 @@ mod imp { } } }; - if !out_done && try!(handle(out_pipe.read_to_end(&mut out))) { + if !out_done && handle(out_pipe.read_to_end(&mut out))? { out_done = true; } data(true, &mut out, out_done); - if !err_done && try!(handle(err_pipe.read_to_end(&mut err))) { + if !err_done && handle(err_pipe.read_to_end(&mut err))? { err_done = true; } data(false, &mut err, err_done); @@ -116,29 +116,29 @@ mod imp { let mut out = Vec::new(); let mut err = Vec::new(); - let port = try!(CompletionPort::new(1)); - try!(port.add_handle(0, &out_pipe)); - try!(port.add_handle(1, &err_pipe)); + let port = CompletionPort::new(1)?; + port.add_handle(0, &out_pipe)?; + port.add_handle(1, &err_pipe)?; unsafe { let mut out_pipe = Pipe::new(out_pipe, &mut out); let mut err_pipe = Pipe::new(err_pipe, &mut err); - try!(out_pipe.read()); - try!(err_pipe.read()); + out_pipe.read()?; + err_pipe.read()?; let mut status = [CompletionStatus::zero(), CompletionStatus::zero()]; while !out_pipe.done || !err_pipe.done { - for status in try!(port.get_many(&mut status, None)) { + for status in port.get_many(&mut status, None)? { if status.token() == 0 { out_pipe.complete(status); data(true, out_pipe.dst, out_pipe.done); - try!(out_pipe.read()); + out_pipe.read()?; } else { err_pipe.complete(status); data(false, err_pipe.dst, err_pipe.done); - try!(err_pipe.read()); + err_pipe.read()?; } } } diff --git a/src/cargo/util/rustc.rs b/src/cargo/util/rustc.rs index 59a6e17e1fe..954f6d88d95 100644 --- a/src/cargo/util/rustc.rs +++ b/src/cargo/util/rustc.rs @@ -25,20 +25,20 @@ impl Rustc { let (cap_lints, output) = match first.exec_with_output() { Ok(output) => (true, output), - Err(..) => (false, try!(cmd.exec_with_output())), + Err(..) 
=> (false, cmd.exec_with_output()?), }; - let verbose_version = try!(String::from_utf8(output.stdout).map_err(|_| { + let verbose_version = String::from_utf8(output.stdout).map_err(|_| { internal("rustc -v didn't return utf8 output") - })); + })?; let host = { let triple = verbose_version.lines().find(|l| { l.starts_with("host: ") }).map(|l| &l[6..]); - let triple = try!(triple.chain_error(|| { + let triple = triple.chain_error(|| { internal("rustc -v didn't have a line for `host:`") - })); + })?; triple.to_string() }; diff --git a/src/cargo/util/sha256.rs b/src/cargo/util/sha256.rs index 80a48d84b50..376455d6a71 100644 --- a/src/cargo/util/sha256.rs +++ b/src/cargo/util/sha256.rs @@ -7,13 +7,14 @@ mod imp { extern crate openssl; use std::io::Write; - use self::openssl::crypto::hash::{Hasher, Type}; + use self::openssl::hash::{Hasher, MessageDigest}; pub struct Sha256(Hasher); impl Sha256 { pub fn new() -> Sha256 { - Sha256(Hasher::new(Type::SHA256)) + let hasher = Hasher::new(MessageDigest::sha256()).unwrap(); + Sha256(hasher) } pub fn update(&mut self, bytes: &[u8]) { @@ -22,7 +23,8 @@ mod imp { pub fn finish(&mut self) -> [u8; 32] { let mut ret = [0u8; 32]; - ret.copy_from_slice(&self.0.finish()[..]); + let data = self.0.finish().unwrap(); + ret.copy_from_slice(&data[..]); ret } } diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index 2708abd90b0..c5d70b7098b 100644 --- a/src/cargo/util/toml.rs +++ b/src/cargo/util/toml.rs @@ -112,11 +112,11 @@ pub fn to_manifest(contents: &str, Some(path) => path.to_path_buf(), None => manifest.clone(), }; - let root = try!(parse(contents, &manifest, config)); + let root = parse(contents, &manifest, config)?; let mut d = toml::Decoder::new(toml::Value::Table(root)); - let manifest: TomlManifest = try!(Decodable::decode(&mut d).map_err(|e| { + let manifest: TomlManifest = Decodable::decode(&mut d).map_err(|e| { human(e.to_string()) - })); + })?; return match manifest.to_real_manifest(source_id, &layout, config) { Ok((mut manifest, paths)) => { @@ -181,7 +181,7 @@ The TOML spec requires newlines after table definitions (e.g. `[a] b = 1` is invalid), but this file has a table header which does not have a newline after it. 
A newline needs to be added and this warning will soon become a hard error in the future.", file.display()); - try!(config.shell().warn(&msg)); + config.shell().warn(&msg)?; return Ok(toml) } @@ -321,7 +321,7 @@ pub struct TomlVersion { impl Decodable for TomlVersion { fn decode(d: &mut D) -> Result { - let s = try!(d.read_str()); + let s = d.read_str()?; match s.to_semver() { Ok(s) => Ok(TomlVersion { version: s }), Err(e) => Err(d.error(&e)), @@ -428,15 +428,15 @@ impl TomlManifest { let mut warnings = vec![]; let project = self.project.as_ref().or_else(|| self.package.as_ref()); - let project = try!(project.chain_error(|| { + let project = project.chain_error(|| { human("no `package` or `project` section found.") - })); + })?; if project.name.trim().is_empty() { bail!("package name cannot be an empty string.") } - let pkgid = try!(project.to_package_id(source_id)); + let pkgid = project.to_package_id(source_id)?; let metadata = pkgid.generate_metadata(); // If we have no lib at all, use the inferred lib if available @@ -445,8 +445,8 @@ impl TomlManifest { let lib = match self.lib { Some(ref lib) => { - try!(lib.validate_library_name()); - try!(lib.validate_crate_type()); + lib.validate_library_name()?; + lib.validate_crate_type()?; Some( TomlTarget { name: lib.name.clone().or(Some(project.name.clone())), @@ -465,7 +465,7 @@ impl TomlManifest { let bin = layout.main(); for target in bins { - try!(target.validate_binary_name()); + target.validate_binary_name()?; } bins.iter().map(|t| { @@ -494,7 +494,7 @@ impl TomlManifest { let examples = match self.example { Some(ref examples) => { for target in examples { - try!(target.validate_example_name()); + target.validate_example_name()?; } examples.clone() } @@ -504,7 +504,7 @@ impl TomlManifest { let tests = match self.test { Some(ref tests) => { for target in tests { - try!(target.validate_test_name()); + target.validate_test_name()?; } tests.clone() } @@ -514,7 +514,7 @@ impl TomlManifest { let benches = match self.bench { Some(ref benches) => { for target in benches { - try!(target.validate_bench_name()); + target.validate_bench_name()?; } benches.clone() } @@ -589,7 +589,7 @@ impl TomlManifest { None => return Ok(()) }; for (n, v) in dependencies.iter() { - let dep = try!(v.to_dependency(n, cx, kind)); + let dep = v.to_dependency(n, cx, kind)?; cx.deps.push(dep); } @@ -597,27 +597,24 @@ impl TomlManifest { } // Collect the deps - try!(process_dependencies(&mut cx, self.dependencies.as_ref(), - None)); - try!(process_dependencies(&mut cx, self.dev_dependencies.as_ref(), - Some(Kind::Development))); - try!(process_dependencies(&mut cx, self.build_dependencies.as_ref(), - Some(Kind::Build))); + process_dependencies(&mut cx, self.dependencies.as_ref(), + None)?; + process_dependencies(&mut cx, self.dev_dependencies.as_ref(), + Some(Kind::Development))?; + process_dependencies(&mut cx, self.build_dependencies.as_ref(), + Some(Kind::Build))?; for (name, platform) in self.target.iter().flat_map(|t| t) { - cx.platform = Some(try!(name.parse())); - try!(process_dependencies(&mut cx, - platform.dependencies.as_ref(), - None)); - try!(process_dependencies(&mut cx, - platform.build_dependencies.as_ref(), - Some(Kind::Build))); - try!(process_dependencies(&mut cx, - platform.dev_dependencies.as_ref(), - Some(Kind::Development))); + cx.platform = Some(name.parse()?); + process_dependencies(&mut cx, platform.dependencies.as_ref(), + None)?; + process_dependencies(&mut cx, platform.build_dependencies.as_ref(), + Some(Kind::Build))?; + 
process_dependencies(&mut cx, platform.dev_dependencies.as_ref(), + Some(Kind::Development))?; } - replace = try!(self.replace(&mut cx)); + replace = self.replace(&mut cx)?; } { @@ -635,9 +632,7 @@ impl TomlManifest { let exclude = project.exclude.clone().unwrap_or(Vec::new()); let include = project.include.clone().unwrap_or(Vec::new()); - let summary = try!(Summary::new(pkgid, deps, - self.features.clone() - .unwrap_or(HashMap::new()))); + let summary = Summary::new(pkgid, deps, self.features.clone() .unwrap_or(HashMap::new()))?; let metadata = ManifestMetadata { description: project.description.clone(), homepage: project.homepage.clone(), @@ -716,7 +711,7 @@ impl TomlManifest { let mut nested_paths = Vec::new(); let mut warnings = Vec::new(); let mut deps = Vec::new(); - let replace = try!(self.replace(&mut Context { + let replace = self.replace(&mut Context { pkgid: None, deps: &mut deps, source_id: source_id, @@ -725,7 +720,7 @@ impl TomlManifest { warnings: &mut warnings, platform: None, layout: layout, - })); + })?; let profiles = build_profiles(&self.profile); let workspace_config = match self.workspace { Some(ref config) => { @@ -742,11 +737,11 @@ impl TomlManifest { -> CargoResult> { let mut replace = Vec::new(); for (spec, replacement) in self.replace.iter().flat_map(|x| x) { - let mut spec = try!(PackageIdSpec::parse(spec).chain_error(|| { + let mut spec = PackageIdSpec::parse(spec).chain_error(|| { human(format!("replacements must specify a valid semver \ version to replace, but `{}` does not", spec)) - })); + })?; if spec.url().is_none() { spec.set_url(CRATES_IO.parse().unwrap()); } @@ -760,13 +755,13 @@ impl TomlManifest { requirement, but found one for `{}`", spec); } - let dep = try!(replacement.to_dependency(spec.name(), cx, None)); + let dep = replacement.to_dependency(spec.name(), cx, None)?; let dep = { - let version = try!(spec.version().chain_error(|| { + let version = spec.version().chain_error(|| { human(format!("replacements must specify a version \ to replace, but `{}` does not", spec)) - })); + })?; let req = VersionReq::exact(version); dep.clone_inner().set_version_req(req) .into_dependency() @@ -866,7 +861,7 @@ impl TomlDependency { .or_else(|| details.tag.clone().map(GitReference::Tag)) .or_else(|| details.rev.clone().map(GitReference::Rev)) .unwrap_or_else(|| GitReference::Branch("master".to_string())); - let loc = try!(git.to_url()); + let loc = git.to_url()?; SourceId::for_git(&loc, reference) }, (None, Some(path)) => { @@ -882,21 +877,21 @@ impl TomlDependency { if cx.source_id.is_path() { let path = cx.layout.root.join(path); let path = util::normalize_path(&path); - try!(SourceId::for_path(&path)) + SourceId::for_path(&path)? } else { cx.source_id.clone() } }, - (None, None) => try!(SourceId::crates_io(cx.config)), + (None, None) => SourceId::crates_io(cx.config)?, }; let version = details.version.as_ref().map(|v| &v[..]); let mut dep = match cx.pkgid { Some(id) => { - try!(DependencyInner::parse(name, version, &new_source_id, - Some((id, cx.config)))) + DependencyInner::parse(name, version, &new_source_id, + Some((id, cx.config)))? 
} - None => try!(DependencyInner::parse(name, version, &new_source_id, None)), + None => DependencyInner::parse(name, version, &new_source_id, None)?, }; dep = dep.set_features(details.features.unwrap_or(Vec::new())) .set_default_features(details.default_features.unwrap_or(true)) diff --git a/src/cargo/util/vcs.rs b/src/cargo/util/vcs.rs index ffd260680a2..730200316a0 100644 --- a/src/cargo/util/vcs.rs +++ b/src/cargo/util/vcs.rs @@ -9,7 +9,7 @@ pub struct GitRepo; impl GitRepo { pub fn init(path: &Path, _: &Path) -> CargoResult<GitRepo> { - try!(git2::Repository::init(path)); + git2::Repository::init(path)?; Ok(GitRepo) } pub fn discover(path: &Path, _: &Path) -> Result<git2::Repository, git2::Error> { @@ -19,11 +19,11 @@ impl HgRepo { pub fn init(path: &Path, cwd: &Path) -> CargoResult<HgRepo> { - try!(process("hg").cwd(cwd).arg("init").arg(path).exec()); + process("hg").cwd(cwd).arg("init").arg(path).exec()?; Ok(HgRepo) } pub fn discover(path: &Path, cwd: &Path) -> CargoResult<HgRepo> { - try!(process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output()); + process("hg").cwd(cwd).arg("root").cwd(path).exec_with_output()?; Ok(HgRepo) } } diff --git a/src/ci/docker/cross/Dockerfile b/src/ci/docker/cross/Dockerfile new file mode 100644 index 00000000000..e0a9840e05c --- /dev/null +++ b/src/ci/docker/cross/Dockerfile @@ -0,0 +1,2 @@ +FROM alexcrichton/rust-slave-linux-cross:2016-10-11c +ENTRYPOINT [] diff --git a/src/ci/docker/dist/Dockerfile b/src/ci/docker/dist/Dockerfile new file mode 100644 index 00000000000..d87d8f71998 --- /dev/null +++ b/src/ci/docker/dist/Dockerfile @@ -0,0 +1,2 @@ +FROM alexcrichton/rust-slave-dist:2016-09-26 +ENTRYPOINT [] diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh new file mode 100755 index 00000000000..a8f71fab1e7 --- /dev/null +++ b/src/ci/docker/run.sh @@ -0,0 +1,49 @@ +#!/bin/sh +# Copyright 2016 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +# option. This file may not be copied, modified, or distributed +# except according to those terms.
+ +set -e + +script=`cd $(dirname $0) && pwd`/`basename $0` +image=$1 +TARGET=$2 + +docker_dir="`dirname $script`" +ci_dir="`dirname $docker_dir`" +src_dir="`dirname $ci_dir`" +root_dir="`dirname $src_dir`" + +docker build \ + --rm \ + -t rust-ci \ + "`dirname "$script"`/$image" + +mkdir -p $HOME/.cargo +mkdir -p target + +exec docker run \ + --user `id -u`:`id -g` \ + --volume "$root_dir:/checkout:ro" \ + --workdir /tmp \ + --env CFG_DISABLE_CROSS_TESTS=$CFG_DISABLE_CROSS_TESTS \ + --env MAKE_TARGETS="$MAKE_TARGETS" \ + --env SRC=/checkout \ + --env CARGO_HOME=/cargo \ + --volume "$HOME/.cargo:/cargo" \ + --volume `rustc --print sysroot`:/rust:ro \ + --volume `pwd`/target:/tmp/target \ + --interactive \ + --tty \ + rust-ci \ + sh -c "\ + PATH=\$PATH:/rust/bin \ + LD_LIBRARY_PATH=/rust/lib:\$LD_LIBRARY_PATH \ + /checkout/src/ci/run.sh $TARGET" + diff --git a/src/ci/docker/x86_64-musl/Dockerfile b/src/ci/docker/x86_64-musl/Dockerfile new file mode 100644 index 00000000000..4206c3120c7 --- /dev/null +++ b/src/ci/docker/x86_64-musl/Dockerfile @@ -0,0 +1,11 @@ +FROM ubuntu:16.04 + +RUN apt-get update -y && apt-get install -y --no-install-recommends \ + cmake \ + make \ + gcc \ + musl-tools \ + curl \ + ca-certificates \ + libc6-dev \ + git diff --git a/src/ci/run.sh b/src/ci/run.sh new file mode 100755 index 00000000000..456c3f8b2b1 --- /dev/null +++ b/src/ci/run.sh @@ -0,0 +1,32 @@ +#!/bin/sh +# Copyright 2016 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +set -ex + +TARGET=$1 + +if [ -z "$SRC" ]; then + SRC=. +fi + +$SRC/configure \ + --prefix=/tmp/obj/install \ + --target=$TARGET \ + --enable-nightly + +make cargo-$TARGET +make dist-$TARGET + +if [ !
-z "$MAKE_TARGETS" ]; then + for target in "$MAKE_TARGETS"; do + make $target + done +fi diff --git a/src/crates-io/Cargo.toml b/src/crates-io/Cargo.toml index 71393b8a55b..d4e4afe035d 100644 --- a/src/crates-io/Cargo.toml +++ b/src/crates-io/Cargo.toml @@ -13,6 +13,6 @@ name = "crates_io" path = "lib.rs" [dependencies] -curl = "0.3" +curl = "0.4" url = "1.0" rustc-serialize = "0.3" diff --git a/src/crates-io/lib.rs b/src/crates-io/lib.rs index 8ecb932fbde..884460df290 100644 --- a/src/crates-io/lib.rs +++ b/src/crates-io/lib.rs @@ -127,35 +127,35 @@ impl Registry { } pub fn add_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { - let body = try!(json::encode(&OwnersReq { users: owners })); - let body = try!(self.put(format!("/crates/{}/owners", krate), - body.as_bytes())); - assert!(try!(json::decode::(&body)).ok); + let body = json::encode(&OwnersReq { users: owners })?; + let body = self.put(format!("/crates/{}/owners", krate), + body.as_bytes())?; + assert!(json::decode::(&body)?.ok); Ok(()) } pub fn remove_owners(&mut self, krate: &str, owners: &[&str]) -> Result<()> { - let body = try!(json::encode(&OwnersReq { users: owners })); - let body = try!(self.delete(format!("/crates/{}/owners", krate), - Some(body.as_bytes()))); - assert!(try!(json::decode::(&body)).ok); + let body = json::encode(&OwnersReq { users: owners })?; + let body = self.delete(format!("/crates/{}/owners", krate), + Some(body.as_bytes()))?; + assert!(json::decode::(&body)?.ok); Ok(()) } pub fn list_owners(&mut self, krate: &str) -> Result> { - let body = try!(self.get(format!("/crates/{}/owners", krate))); - Ok(try!(json::decode::(&body)).users) + let body = self.get(format!("/crates/{}/owners", krate))?; + Ok(json::decode::(&body)?.users) } pub fn publish(&mut self, krate: &NewCrate, tarball: &File) -> Result<()> { - let json = try!(json::encode(krate)); + let json = json::encode(krate)?; // Prepare the body. 
The format of the upload request is: // // // (metadata for the package) // // - let stat = try!(tarball.metadata().map_err(Error::Io)); + let stat = tarball.metadata().map_err(Error::Io)?; let header = { let mut w = Vec::new(); w.extend([ @@ -182,57 +182,57 @@ impl Registry { Some(s) => s, None => return Err(Error::TokenMissing), }; - try!(self.handle.put(true)); - try!(self.handle.url(&url)); - try!(self.handle.in_filesize(size as u64)); + self.handle.put(true)?; + self.handle.url(&url)?; + self.handle.in_filesize(size as u64)?; let mut headers = List::new(); - try!(headers.append("Accept: application/json")); - try!(headers.append(&format!("Authorization: {}", token))); - try!(self.handle.http_headers(headers)); + headers.append("Accept: application/json")?; + headers.append(&format!("Authorization: {}", token))?; + self.handle.http_headers(headers)?; - let _body = try!(handle(&mut self.handle, &mut |buf| { + let _body = handle(&mut self.handle, &mut |buf| { body.read(buf).unwrap_or(0) - })); + })?; Ok(()) } pub fn search(&mut self, query: &str, limit: u8) -> Result<(Vec, u32)> { let formated_query = percent_encode(query.as_bytes(), QUERY_ENCODE_SET); - let body = try!(self.req( + let body = self.req( format!("/crates?q={}&per_page={}", formated_query, limit), None, Auth::Unauthorized - )); + )?; - let crates = try!(json::decode::(&body)); + let crates = json::decode::(&body)?; Ok((crates.crates, crates.meta.total)) } pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> { - let body = try!(self.delete(format!("/crates/{}/{}/yank", krate, version), - None)); - assert!(try!(json::decode::(&body)).ok); + let body = self.delete(format!("/crates/{}/{}/yank", krate, version), + None)?; + assert!(json::decode::(&body)?.ok); Ok(()) } pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> { - let body = try!(self.put(format!("/crates/{}/{}/unyank", krate, version), - &[])); - assert!(try!(json::decode::(&body)).ok); + let body = self.put(format!("/crates/{}/{}/unyank", krate, version), + &[])?; + assert!(json::decode::(&body)?.ok); Ok(()) } fn put(&mut self, path: String, b: &[u8]) -> Result { - try!(self.handle.put(true)); + self.handle.put(true)?; self.req(path, Some(b), Auth::Authorized) } fn get(&mut self, path: String) -> Result { - try!(self.handle.get(true)); + self.handle.get(true)?; self.req(path, None, Auth::Authorized) } fn delete(&mut self, path: String, b: Option<&[u8]>) -> Result { - try!(self.handle.custom_request("DELETE")); + self.handle.custom_request("DELETE")?; self.req(path, b, Auth::Authorized) } @@ -240,23 +240,23 @@ impl Registry { path: String, body: Option<&[u8]>, authorized: Auth) -> Result { - try!(self.handle.url(&format!("{}/api/v1{}", self.host, path))); + self.handle.url(&format!("{}/api/v1{}", self.host, path))?; let mut headers = List::new(); - try!(headers.append("Accept: application/json")); - try!(headers.append("Content-Type: application/json")); + headers.append("Accept: application/json")?; + headers.append("Content-Type: application/json")?; if authorized == Auth::Authorized { let token = match self.token.as_ref() { Some(s) => s, None => return Err(Error::TokenMissing), }; - try!(headers.append(&format!("Authorization: {}", token))); + headers.append(&format!("Authorization: {}", token))?; } - try!(self.handle.http_headers(headers)); + self.handle.http_headers(headers)?; match body { Some(mut body) => { - try!(self.handle.upload(true)); - try!(self.handle.in_filesize(body.len() as u64)); + self.handle.upload(true)?; + 
self.handle.in_filesize(body.len() as u64)?; handle(&mut self.handle, &mut |buf| body.read(buf).unwrap_or(0)) } None => handle(&mut self.handle, &mut |_| 0), @@ -270,19 +270,19 @@ fn handle(handle: &mut Easy, let mut body = Vec::new(); { let mut handle = handle.transfer(); - try!(handle.read_function(|buf| Ok(read(buf)))); - try!(handle.write_function(|data| { + handle.read_function(|buf| Ok(read(buf)))?; + handle.write_function(|data| { body.extend_from_slice(data); Ok(data.len()) - })); - try!(handle.header_function(|data| { + })?; + handle.header_function(|data| { headers.push(String::from_utf8_lossy(data).into_owned()); true - })); - try!(handle.perform()); + })?; + handle.perform()?; } - match try!(handle.response_code()) { + match handle.response_code()? { 0 => {} // file upload url sometimes 200 => {} 403 => return Err(Error::Unauthorized), @@ -310,13 +310,13 @@ impl fmt::Display for Error { Error::NonUtf8Body => write!(f, "response body was not utf-8"), Error::Curl(ref err) => write!(f, "http error: {}", err), Error::NotOkResponse(code, ref headers, ref body) => { - try!(writeln!(f, "failed to get a 200 OK response, got {}", code)); - try!(writeln!(f, "headers:")); + writeln!(f, "failed to get a 200 OK response, got {}", code)?; + writeln!(f, "headers:")?; for header in headers { - try!(writeln!(f, " {}", header)); + writeln!(f, " {}", header)?; } - try!(writeln!(f, "body:")); - try!(writeln!(f, "{}", String::from_utf8_lossy(body))); + writeln!(f, "body:")?; + writeln!(f, "{}", String::from_utf8_lossy(body))?; Ok(()) } Error::Api(ref errs) => { diff --git a/src/etc/install-deps.py b/src/etc/install-deps.py index 350bf1954e6..5f4b650c9f2 100644 --- a/src/etc/install-deps.py +++ b/src/etc/install-deps.py @@ -55,12 +55,19 @@ rust_date = open('src/rustversion.txt').read().strip() url = 'https://static.rust-lang.org/dist/' + rust_date +cargo_url = 'https://static.rust-lang.org/cargo-dist/2016-03-21' def install_via_tarballs(): if os.path.isdir("rustc-install"): shutil.rmtree("rustc-install") + # Download cargo + host_fname = 'cargo-nightly-' + host + '.tar.gz' + download.get(cargo_url + '/' + host_fname, host_fname) + download.unpack(host_fname, "rustc-install", quiet=True, strip=2) + os.remove(host_fname) + # Download the compiler host_fname = 'rustc-nightly-' + host + '.tar.gz' download.get(url + '/' + host_fname, host_fname) diff --git a/src/rustversion.txt b/src/rustversion.txt index 53b3604545e..8918bbc071a 100644 --- a/src/rustversion.txt +++ b/src/rustversion.txt @@ -1 +1 @@ -2016-10-21 +2016-11-03 diff --git a/tests/build-auth.rs b/tests/build-auth.rs index 8784329a9d6..748937d8ffd 100644 --- a/tests/build-auth.rs +++ b/tests/build-auth.rs @@ -41,7 +41,7 @@ fn http_auth_offered() { assert_eq!(req, vec![ "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", "Accept: */*", - "User-Agent: git/1.0 (libgit2 0.23.0)", + "User-Agent: git/1.0 (libgit2 0.24.0)", ].into_iter().map(|s| s.to_string()).collect()); drop(s); @@ -56,7 +56,7 @@ fn http_auth_offered() { "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", "Authorization: Basic Zm9vOmJhcg==", "Accept: */*", - "User-Agent: git/1.0 (libgit2 0.23.0)", + "User-Agent: git/1.0 (libgit2 0.24.0)", ].into_iter().map(|s| s.to_string()).collect()); }); @@ -127,7 +127,10 @@ fn https_something_happens() { let a = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = a.local_addr().unwrap(); let t = thread::spawn(move|| { - drop(a.accept().unwrap()); + let mut s = a.accept().unwrap().0; + drop(s.write(b"1234")); + 
drop(s.shutdown(std::net::Shutdown::Write)); + drop(s.read(&mut [0; 16])); }); let p = project("foo") @@ -164,7 +167,7 @@ Caused by: // just not verify the error message here. "[..]" } else { - "[[..]] SSL error: [..]" + "[..] SSL error: [..]" }))); t.join().ok().unwrap(); diff --git a/tests/cargotest/Cargo.toml b/tests/cargotest/Cargo.toml index fd5f3431a2d..d33514b1d6e 100644 --- a/tests/cargotest/Cargo.toml +++ b/tests/cargotest/Cargo.toml @@ -11,7 +11,7 @@ bufstream = "0.1" cargo = { path = "../.." } filetime = "0.1" flate2 = "0.2" -git2 = "0.4" +git2 = { version = "0.6", default-features = false } hamcrest = "0.1" kernel32-sys = "0.2" libc = "0.2" diff --git a/tests/cargotest/support/mod.rs b/tests/cargotest/support/mod.rs index ea63261892b..18f11244b06 100644 --- a/tests/cargotest/support/mod.rs +++ b/tests/cargotest/support/mod.rs @@ -315,15 +315,15 @@ impl Execs { } fn match_stdout(&self, actual: &Output) -> ham::MatchResult { - try!(self.match_std(self.expect_stdout.as_ref(), &actual.stdout, - "stdout", &actual.stderr, false)); + self.match_std(self.expect_stdout.as_ref(), &actual.stdout, + "stdout", &actual.stderr, false)?; for expect in self.expect_stdout_contains.iter() { - try!(self.match_std(Some(expect), &actual.stdout, "stdout", - &actual.stderr, true)); + self.match_std(Some(expect), &actual.stdout, "stdout", + &actual.stderr, true)?; } for expect in self.expect_stderr_contains.iter() { - try!(self.match_std(Some(expect), &actual.stderr, "stderr", - &actual.stdout, true)); + self.match_std(Some(expect), &actual.stderr, "stderr", + &actual.stdout, true)?; } if let Some(ref objects) = self.expect_json { @@ -336,7 +336,7 @@ impl Execs { objects.len(), lines.len())); } for (obj, line) in objects.iter().zip(lines) { - try!(self.match_json(obj, line)); + self.match_json(obj, line)?; } } Ok(()) @@ -366,24 +366,29 @@ impl Execs { let mut a = actual.lines(); let e = out.lines(); - let diffs = if partial { - let mut min = self.diff_lines(a.clone(), e.clone(), partial); + if partial { + let mut diffs = self.diff_lines(a.clone(), e.clone(), partial); while let Some(..) 
= a.next() { let a = self.diff_lines(a.clone(), e.clone(), partial); - if a.len() < min.len() { - min = a; + if a.len() < diffs.len() { + diffs = a; } } - min + ham::expect(diffs.is_empty(), + format!("expected to find:\n\ + {}\n\n\ + did not find in output:\n\ + {}", out, + actual)) } else { - self.diff_lines(a, e, partial) - }; - ham::expect(diffs.is_empty(), - format!("differences:\n\ - {}\n\n\ - other output:\n\ - `{}`", diffs.join("\n"), - String::from_utf8_lossy(extra))) + let diffs = self.diff_lines(a, e, partial); + ham::expect(diffs.is_empty(), + format!("differences:\n\ + {}\n\n\ + other output:\n\ + `{}`", diffs.join("\n"), + String::from_utf8_lossy(extra))) + } } diff --git a/tests/freshness.rs b/tests/freshness.rs index 1a51f2e69b8..b5390a20379 100644 --- a/tests/freshness.rs +++ b/tests/freshness.rs @@ -443,15 +443,16 @@ fn rebuild_tests_if_lib_changes() { #[test] fn test() { foo::foo(); } "#); + p.build(); - assert_that(p.cargo_process("build"), + p.root().move_into_the_past(); + + assert_that(p.cargo("build"), execs().with_status(0)); assert_that(p.cargo("test"), execs().with_status(0)); File::create(&p.root().join("src/lib.rs")).unwrap(); - p.root().move_into_the_past(); - p.root().join("target").move_into_the_past(); assert_that(p.cargo("build"), execs().with_status(0)); diff --git a/tests/init.rs b/tests/init.rs index 315394359d9..cd64118929f 100644 --- a/tests/init.rs +++ b/tests/init.rs @@ -7,13 +7,13 @@ use std::fs::{self, File}; use std::io::prelude::*; use std::env; -use cargo::util::{process, ProcessBuilder}; +use cargo::util::ProcessBuilder; use cargotest::support::{execs, paths, cargo_dir}; use hamcrest::{assert_that, existing_file, existing_dir, is_not}; use tempdir::TempDir; fn cargo_process(s: &str) -> ProcessBuilder { - let mut p = process(&cargo_dir().join("cargo")); + let mut p = cargotest::process(&cargo_dir().join("cargo")); p.arg(s).cwd(&paths::root()).env("HOME", &paths::home()); return p; } diff --git a/tests/net-config.rs b/tests/net-config.rs index deb9ff940a9..be868c2c26f 100644 --- a/tests/net-config.rs +++ b/tests/net-config.rs @@ -26,7 +26,7 @@ fn net_retry_loads_from_config() { assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101) .with_stderr_contains("[WARNING] spurious network error \ -(1 tries remaining): [2/-1] [..]")); +(1 tries remaining): [..]")); } #[test] @@ -50,7 +50,7 @@ fn net_retry_git_outputs_warning() { assert_that(p.cargo_process("build").arg("-v").arg("-j").arg("1"), execs().with_status(101) .with_stderr_contains("[WARNING] spurious network error \ -(2 tries remaining): [2/-1] [..]") +(2 tries remaining): [..]") .with_stderr_contains("\ -[WARNING] spurious network error (1 tries remaining): [2/-1] [..]")); +[WARNING] spurious network error (1 tries remaining): [..]")); } diff --git a/tests/package.rs b/tests/package.rs index f7b3d870cfe..394482cfd4d 100644 --- a/tests/package.rs +++ b/tests/package.rs @@ -10,8 +10,7 @@ use std::fs::{File, OpenOptions}; use std::io::prelude::*; use std::path::{Path, PathBuf}; -use cargo::util::process; -use cargotest::cargo_process; +use cargotest::{cargo_process, process}; use cargotest::support::{project, execs, paths, git, path2url, cargo_dir}; use flate2::read::GzDecoder; use hamcrest::{assert_that, existing_file, contains}; diff --git a/tests/resolve.rs b/tests/resolve.rs index 9129a1e6e92..15f583b51c1 100644 --- a/tests/resolve.rs +++ b/tests/resolve.rs @@ -18,9 +18,7 @@ fn resolve(pkg: PackageId, deps: Vec, -> CargoResult> { let summary = Summary::new(pkg.clone(), deps, 
HashMap::new()).unwrap(); let method = Method::Everything; - Ok(try!(resolver::resolve(&[(summary, method)], - &[], - registry)).iter().map(|p| { + Ok(resolver::resolve(&[(summary, method)], &[], registry)?.iter().map(|p| { p.clone() }).collect()) } diff --git a/tests/shell.rs b/tests/shell.rs index 549070d4157..4b03aa6f636 100644 --- a/tests/shell.rs +++ b/tests/shell.rs @@ -93,10 +93,10 @@ fn no_term() { fn colored_output(string: &str, color: color::Color) -> CargoResult { let mut term = TerminfoTerminal::new(Vec::new()).unwrap(); - try!(term.reset()); - try!(term.fg(color)); - try!(write!(&mut term, "{}", string)); - try!(term.reset()); - try!(term.flush()); + term.reset()?; + term.fg(color)?; + write!(&mut term, "{}", string)?; + term.reset()?; + term.flush()?; Ok(String::from_utf8_lossy(term.get_ref()).to_string()) } diff --git a/tests/test.rs b/tests/test.rs index a9f78b2be40..8d058dc55c9 100644 --- a/tests/test.rs +++ b/tests/test.rs @@ -2364,3 +2364,37 @@ fn test_release_ignore_panic() { println!("bench"); assert_that(p.cargo("bench").arg("-v"), execs().with_status(0)); } + +#[test] +fn test_many_with_features() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + a = { path = "a" } + + [features] + foo = [] + + [workspace] + "#) + .file("src/lib.rs", "") + .file("a/Cargo.toml", r#" + [package] + name = "a" + version = "0.0.1" + authors = [] + "#) + .file("a/src/lib.rs", ""); + p.build(); + + assert_that(p.cargo("test").arg("-v") + .arg("-p").arg("a") + .arg("-p").arg("foo") + .arg("--features").arg("foo"), + execs().with_status(0)); +}
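Note on the change pattern: most of this patch is a mechanical migration from the `try!()` macro to the `?` operator, which was stabilized in Rust 1.13, alongside the src/rustversion.txt bump to a 2016-11-03 toolchain. As a minimal illustrative sketch (not part of the patch), the two forms below propagate errors identically; the read_config_* helpers are hypothetical and exist only for this example:

    use std::fs::File;
    use std::io::{self, Read};

    // Old style: try!(expr) matches on the Result and returns the error early,
    // converting it with From::from.
    fn read_config_old(path: &str) -> io::Result<String> {
        let mut buf = String::new();
        try!(try!(File::open(path)).read_to_string(&mut buf));
        Ok(buf)
    }

    // New style used throughout this patch: `?` desugars to the same early return.
    fn read_config_new(path: &str) -> io::Result<String> {
        let mut buf = String::new();
        File::open(path)?.read_to_string(&mut buf)?;
        Ok(buf)
    }

Both forms require the enclosing function to return a compatible Result, so the rewrite is purely syntactic and does not change behavior.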