Split cargo-deadlinks and deadlinks into two binaries
- Expose a public `walk_dir` function, which prints all missing files (see the usage sketch below)
- `impl Clone for CheckContext`
- Put cargo dependencies behind a feature gate
- Switch from `Into` to `From` to help type inference
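
As context for the new library API, here is a minimal sketch of how a downstream crate could call the newly public `walk_dir` with a `CheckContext`; the `target/doc` path and the standalone `main` are illustrative assumptions, not part of this commit.

```rust
use std::path::Path;

use cargo_deadlinks::{walk_dir, CheckContext};

fn main() {
    // Build a check configuration; both fields are public.
    let ctx = CheckContext {
        check_http: false,
        verbose: true,
    };
    // `CheckContext` is now `Clone`, so the same context can be reused
    // across several documentation directories.
    let found_errors = walk_dir(Path::new("target/doc"), ctx.clone());
    // `walk_dir` returns true if at least one dead link was reported.
    if found_errors {
        std::process::exit(1);
    }
}
```

Because the binaries now implement `From<MainArgs> for CheckContext` instead of `Into`, a caller can also write `walk_dir(&dir, args.into())` and let the compiler infer the target type.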
jyn514 committed Oct 30, 2020
1 parent 5af27cd commit 154ed75
Showing 5 changed files with 137 additions and 62 deletions.
16 changes: 14 additions & 2 deletions Cargo.toml
@@ -7,9 +7,22 @@ edition = "2018"
repository = "https://github.com/deadlinks/cargo-deadlinks"
readme = "README.md"
license = "MIT OR Apache-2.0"
autobins = false

[[bin]]
name = "cargo-deadlinks"
required-features = ["cargo"]

[[bin]]
name = "deadlinks"

[features]
cargo = ["cargo_metadata", "serde_json"]
default = ["cargo"]

[dependencies]
cargo_metadata = "0.9"
cargo_metadata = { version = "0.9", optional = true }
serde_json = { version = "1.0.34", optional = true }
docopt = "1"
env_logger = "0.8"
html5ever = "0.24"
@@ -21,7 +34,6 @@ serde = "1.0"
serde_derive = "1.0"
url = "2"
walkdir = "2.1"
serde_json = "1.0.34"

[dev-dependencies]
assert_cmd = "1.0"
71 changes: 12 additions & 59 deletions src/main.rs → src/bin/cargo-deadlinks.rs
@@ -1,16 +1,14 @@
use log::error;
use serde_derive::Deserialize;

use std::path::{Path, PathBuf};
use std::path::PathBuf;
use std::process;

use cargo_metadata::Metadata;
use docopt::Docopt;
use log::LevelFilter;
use log::error;
use serde_derive::Deserialize;

use rayon::{prelude::*, ThreadPoolBuilder};
use cargo_deadlinks::{walk_dir, CheckContext};

use cargo_deadlinks::{unavailable_urls, CheckContext};
mod shared;

const MAIN_USAGE: &str = "
Check your package's documentation for dead links.
@@ -35,10 +33,11 @@ struct MainArgs {
flag_check_http: bool,
}

impl Into<CheckContext> for MainArgs {
fn into(self) -> CheckContext {
impl From<MainArgs> for CheckContext {
fn from(args: MainArgs) -> CheckContext {
CheckContext {
check_http: self.flag_check_http,
check_http: args.flag_check_http,
verbose: args.flag_debug,
}
}
}
@@ -51,13 +50,14 @@ fn main() {
})
.unwrap_or_else(|e| e.exit());

init_logger(&args);
shared::init_logger(args.flag_debug, args.flag_verbose, "cargo_deadlinks");

let dirs = args
.arg_directory
.as_ref()
.map_or_else(determine_dir, |dir| vec![dir.into()]);

let ctx = CheckContext::from(args);
let mut errors = false;
for dir in dirs {
let dir = match dir.canonicalize() {
@@ -70,7 +70,7 @@ fn main() {
}
};
log::info!("checking directory {:?}", dir);
if walk_dir(&dir, &args) {
if walk_dir(&dir, ctx.clone()) {
errors = true;
}
}
@@ -107,26 +107,6 @@ pub fn metadata_run(additional_args: Option<String>) -> Result<Metadata, ()> {
Ok(serde_json::from_str(stdout).expect("invalid JSON"))
}

/// Initalizes the logger according to the provided config flags.
fn init_logger(args: &MainArgs) {
use std::io::Write;

let mut builder = env_logger::Builder::new();
builder.format(|f, record| writeln!(f, "{}", record.args()));
match (args.flag_debug, args.flag_verbose) {
(true, _) => {
builder.filter(Some("cargo_deadlinks"), LevelFilter::Debug);
}
(false, true) => {
builder.filter(Some("cargo_deadlinks"), LevelFilter::Info);
}
(false, false) => {
builder.filter(Some("cargo_deadlinks"), LevelFilter::Error);
}
}
builder.init();
}

/// Returns the directory to use as root of the documentation.
///
/// If a directory has been provided as a CLI argument, that one is used.
Expand Down Expand Up @@ -173,33 +153,6 @@ fn has_docs(target: &cargo_metadata::Target) -> bool {
}
}

/// Traverses a given path recursively, checking all *.html files found.
///
/// Returns whether an error occurred.
fn walk_dir(dir_path: &Path, args: &MainArgs) -> bool {
let pool = ThreadPoolBuilder::new()
.num_threads(num_cpus::get())
.build()
.unwrap();

let ctx = CheckContext {
check_http: args.flag_check_http,
};
pool.install(|| {
unavailable_urls(dir_path, &ctx)
.map(|err| {
if args.flag_debug {
error!("{}", err);
} else {
error!("{}", err.print_shortened(Some(dir_path)));
}
true
})
// OR together the per-link results: true if any dead link was found
.reduce(|| false, |initial, new| initial || new)
})
}

#[cfg(test)]
mod test {
use super::has_docs;
61 changes: 61 additions & 0 deletions src/bin/deadlinks.rs
@@ -0,0 +1,61 @@
use std::path::PathBuf;
use std::process;

use cargo_deadlinks::{walk_dir, CheckContext};
use docopt::Docopt;
use serde_derive::Deserialize;

mod shared;

const MAIN_USAGE: &str = "
Check your package's documentation for dead links.
Usage:
deadlinks <directory> [options]
Options:
-h --help Print this message
--check-http Check 'http' and 'https' scheme links
--debug Use debug output
-v --verbose Use verbose output
-V --version Print version info and exit.
";

#[derive(Debug, Deserialize)]
struct MainArgs {
arg_directory: PathBuf,
flag_verbose: bool,
flag_debug: bool,
flag_check_http: bool,
}

impl From<MainArgs> for CheckContext {
fn from(args: MainArgs) -> CheckContext {
CheckContext {
check_http: args.flag_check_http,
verbose: args.flag_debug,
}
}
}

fn main() {
let args: MainArgs = Docopt::new(MAIN_USAGE)
.and_then(|d| {
d.version(Some(env!("CARGO_PKG_VERSION").to_owned()))
.deserialize()
})
.unwrap_or_else(|e| e.exit());
shared::init_logger(args.flag_debug, args.flag_verbose, "deadlinks");

let dir = match args.arg_directory.canonicalize() {
Ok(dir) => dir,
Err(_) => {
println!("Could not find directory {:?}.", args.arg_directory);
process::exit(1);
}
};
log::info!("checking directory {:?}", dir);
if walk_dir(&dir, args.into()) {
process::exit(1);
}
}
21 changes: 21 additions & 0 deletions src/bin/shared.rs
@@ -0,0 +1,21 @@
use log::LevelFilter;

/// Initializes the logger according to the provided config flags.
pub fn init_logger(debug: bool, verbose: bool, krate: &str) {
use std::io::Write;

let mut builder = env_logger::Builder::new();
builder.format(|f, record| writeln!(f, "{}", record.args()));
match (debug, verbose) {
(true, _) => {
builder.filter(Some(krate), LevelFilter::Debug);
}
(false, true) => {
builder.filter(Some(krate), LevelFilter::Info);
}
(false, false) => {
builder.filter(Some(krate), LevelFilter::Error);
}
}
builder.init();
}
30 changes: 29 additions & 1 deletion src/lib.rs
@@ -3,7 +3,9 @@ use std::{
path::{Path, PathBuf},
};

use log::error;
use rayon::prelude::*;
use rayon::ThreadPoolBuilder;
use walkdir::{DirEntry, WalkDir};

use check::is_available;
@@ -14,9 +16,11 @@ pub use check::{CheckError, HttpError};
mod check;
mod parse;

#[derive(Debug)]
// NOTE: this could be Copy, but we intentionally choose not to guarantee that.
#[derive(Clone, Debug)]
pub struct CheckContext {
pub check_http: bool,
pub verbose: bool,
}

#[derive(Debug)]
@@ -32,6 +36,30 @@ impl fmt::Display for FileError {
}
}

/// Traverses a given path recursively, checking all *.html files found.
///
/// Returns whether an error occurred.
pub fn walk_dir(dir_path: &Path, ctx: CheckContext) -> bool {
let pool = ThreadPoolBuilder::new()
.num_threads(num_cpus::get())
.build()
.unwrap();

pool.install(|| {
unavailable_urls(dir_path, &ctx)
.map(|err| {
if ctx.verbose {
error!("{}", err);
} else {
error!("{}", err.print_shortened(Some(dir_path)));
}
true
})
// OR together the per-link results: true if any dead link was found
.reduce(|| false, |initial, new| initial || new)
})
}

impl FileError {
pub fn print_shortened(&self, prefix: Option<&Path>) -> String {
let prefix = prefix.unwrap_or_else(|| Path::new(""));