fix clippy warnings #581

Merged: 4 commits, Jun 18, 2021
Changes from all commits
76 changes: 39 additions & 37 deletions .github/workflows/rust.yml
@@ -321,41 +321,43 @@ jobs:
# Ignore MIRI errors until we can get a clean run
cargo miri test || true

coverage:
name: Coverage
runs-on: ubuntu-latest
strategy:
matrix:
arch: [amd64]
rust: [stable]
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache Cargo
uses: actions/cache@v2
with:
path: /home/runner/.cargo
# this key is not equal because the user is different than on a container (runner vs github)
key: cargo-coverage-cache-
- name: Cache Rust dependencies
uses: actions/cache@v2
with:
path: /home/runner/target
# this key is not equal because coverage uses different compilation flags.
key: ${{ runner.os }}-${{ matrix.arch }}-target-coverage-cache-${{ matrix.rust }}-
- name: Run coverage
run: |
export ARROW_TEST_DATA=$(pwd)/testing/data
export PARQUET_TEST_DATA=$(pwd)/parquet-testing/data
# Coverage job was failing. https://github.com/apache/arrow-datafusion/issues/590 tracks re-instating it

# 2020-11-15: There is a cargo-tarpaulin regression in 0.17.0
# see https://github.com/xd009642/tarpaulin/issues/618
cargo install --version 0.16.0 cargo-tarpaulin
cargo tarpaulin --out Xml
env:
CARGO_HOME: "/home/runner/.cargo"
CARGO_TARGET_DIR: "/home/runner/target"
- name: Report coverage
continue-on-error: true
run: bash <(curl -s https://codecov.io/bash)
# coverage:
# name: Coverage
# runs-on: ubuntu-latest
# strategy:
# matrix:
# arch: [amd64]
# rust: [stable]
# steps:
# - uses: actions/checkout@v2
# with:
# submodules: true
# - name: Cache Cargo
# uses: actions/cache@v2
# with:
# path: /home/runner/.cargo
# # this key is not equal because the user is different than on a container (runner vs github)
# key: cargo-coverage-cache-
# - name: Cache Rust dependencies
# uses: actions/cache@v2
# with:
# path: /home/runner/target
# # this key is not equal because coverage uses different compilation flags.
# key: ${{ runner.os }}-${{ matrix.arch }}-target-coverage-cache-${{ matrix.rust }}-
# - name: Run coverage
# run: |
# export ARROW_TEST_DATA=$(pwd)/testing/data
# export PARQUET_TEST_DATA=$(pwd)/parquet-testing/data

# # 2020-11-15: There is a cargo-tarpaulin regression in 0.17.0
# # see https://github.com/xd009642/tarpaulin/issues/618
# cargo install --version 0.16.0 cargo-tarpaulin
# cargo tarpaulin --out Xml
# env:
# CARGO_HOME: "/home/runner/.cargo"
# CARGO_TARGET_DIR: "/home/runner/target"
# - name: Report coverage
# continue-on-error: true
# run: bash <(curl -s https://codecov.io/bash)
2 changes: 1 addition & 1 deletion ballista/rust/core/src/serde/scheduler/mod.rs
@@ -142,7 +142,7 @@ impl PartitionStats {
]
}

pub fn to_arrow_arrayref(&self) -> Result<Arc<StructArray>, BallistaError> {
pub fn to_arrow_arrayref(self) -> Result<Arc<StructArray>, BallistaError> {
let mut field_builders = Vec::new();

let mut num_rows_builder = UInt64Builder::new(1);
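The `PartitionStats::to_arrow_arrayref` change above (from `&self` to `self`) is what clippy's `wrong_self_convention` lint asks of `to_*` methods on `Copy` types. A minimal sketch of the pattern, with an invented `Stats` type standing in for the real `PartitionStats`:

```rust
// Minimal sketch of the `wrong_self_convention` fix; `Stats` and its fields
// are invented for illustration, not the real `PartitionStats`.
#[derive(Clone, Copy)]
struct Stats {
    num_rows: Option<u64>,
    num_bytes: Option<u64>,
}

impl Stats {
    // Clippy flags a `to_*` method that takes `&self` on a `Copy` type;
    // taking `self` by value is just as cheap and matches the convention.
    fn to_pair(self) -> (Option<u64>, Option<u64>) {
        (self.num_rows, self.num_bytes)
    }
}

fn main() {
    let stats = Stats { num_rows: Some(10), num_bytes: Some(1024) };
    assert_eq!(stats.to_pair(), (Some(10), Some(1024)));
}
```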
2 changes: 1 addition & 1 deletion datafusion-cli/src/print_format.rs
@@ -151,7 +151,7 @@ mod tests {

#[test]
fn test_from_str_failure() {
assert_eq!(true, "pretty".parse::<PrintFormat>().is_err());
assert!("pretty".parse::<PrintFormat>().is_err());
}

#[test]
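The `assert_eq!(true, …)` to `assert!(…)` rewrite above (repeated in context.rs, optimizer/utils.rs, not.rs, and scalar.rs below) is the form clippy prefers for boolean assertions (most likely the `bool_assert_comparison` lint, though it may simply be a readability cleanup). A standalone sketch:

```rust
fn main() {
    // Old style, flagged by clippy: comparing a boolean against a literal.
    let failed = "pretty".parse::<i32>().is_err();
    assert_eq!(true, failed);

    // Preferred style: assert the boolean directly; negate for the false case.
    assert!(failed);
    assert!("42".parse::<i32>().is_ok());
}
```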
7 changes: 2 additions & 5 deletions datafusion/src/execution/context.rs
@@ -1125,18 +1125,15 @@ mod tests {
let ctx = create_ctx(&tmp_dir, 1)?;

let schema: Schema = ctx.table("test").unwrap().schema().clone().into();
assert_eq!(schema.field_with_name("c1")?.is_nullable(), false);
assert!(!schema.field_with_name("c1")?.is_nullable());

let plan = LogicalPlanBuilder::scan_empty("", &schema, None)?
.project(vec![col("c1")])?
.build()?;

let plan = ctx.optimize(&plan)?;
let physical_plan = ctx.create_physical_plan(&Arc::new(plan))?;
assert_eq!(
physical_plan.schema().field_with_name("c1")?.is_nullable(),
false
);
assert!(!physical_plan.schema().field_with_name("c1")?.is_nullable());
Ok(())
}

2 changes: 2 additions & 0 deletions datafusion/src/logical_plan/dfschema.rs
@@ -248,12 +248,14 @@
where
}

impl ToDFSchema for Schema {
#[allow(clippy::wrong_self_convention)]
Contributor: I don't understand the suggestion

fn to_dfschema(self) -> Result<DFSchema> {
DFSchema::try_from(self)
}
}

impl ToDFSchema for SchemaRef {
#[allow(clippy::wrong_self_convention)]
fn to_dfschema(self) -> Result<DFSchema> {
// Attempt to use the Schema directly if there are no other
// references, otherwise clone
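The `#[allow(clippy::wrong_self_convention)]` added above covers the opposite case: `to_dfschema` deliberately consumes `self` on non-`Copy` types so the conversion can move the existing data rather than clone it, which clippy would otherwise flag (it expects `&self` for `to_*` on non-`Copy` types). A small sketch of that trade-off, using stand-in types rather than the real `Schema`/`DFSchema`:

```rust
// Stand-in types; the real `Schema` and `DFSchema` live in arrow / datafusion.
struct Schema {
    fields: Vec<String>,
}

struct DFSchema {
    fields: Vec<String>,
}

trait ToDFSchema {
    fn to_dfschema(self) -> DFSchema;
}

impl ToDFSchema for Schema {
    // `to_*` on a non-Copy type would normally take `&self`; this impl
    // intentionally consumes `self` so the field Vec is moved, not cloned,
    // hence the explicit allow in the real code.
    #[allow(clippy::wrong_self_convention)]
    fn to_dfschema(self) -> DFSchema {
        DFSchema { fields: self.fields }
    }
}

fn main() {
    let schema = Schema { fields: vec!["c1".to_string()] };
    let dfschema = schema.to_dfschema();
    assert_eq!(dfschema.fields, vec!["c1".to_string()]);
}
```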
2 changes: 1 addition & 1 deletion datafusion/src/logical_plan/display.rs
@@ -197,7 +197,7 @@ impl<'a, 'b> PlanVisitor for GraphvizVisitor<'a, 'b> {
// id [label="foo"]
let label = if self.with_schema {
format!(
"{}\\nSchema: {}",
r"{}\nSchema: {}",
plan.display(),
display_schema(&plan.schema().as_ref().to_owned().into())
)
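The display.rs change swaps an escaped backslash for a raw string literal; either way the Graphviz label contains the two characters `\` and `n` (a DOT line-break escape), not an actual newline. A quick equivalence check:

```rust
fn main() {
    // Both literals hold a literal backslash followed by 'n', which Graphviz
    // expands to a line break inside a label; neither contains a real newline.
    let escaped = "{}\\nSchema: {}";
    let raw = r"{}\nSchema: {}";
    assert_eq!(escaped, raw);
    assert!(!raw.contains('\n'));
}
```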
2 changes: 1 addition & 1 deletion datafusion/src/optimizer/utils.rs
@@ -551,7 +551,7 @@ mod tests {
stringified_plans,
..
} => {
assert_eq!(*verbose, true);
assert!(*verbose);

let expected_stringified_plans = vec![
StringifiedPlan::new(PlanType::LogicalPlan, "..."),
2 changes: 1 addition & 1 deletion datafusion/src/physical_plan/expressions/not.rs
@@ -129,7 +129,7 @@ mod tests {

let expr = not(col("a"), &schema)?;
assert_eq!(expr.data_type(&schema)?, DataType::Boolean);
assert_eq!(expr.nullable(&schema)?, true);
assert!(expr.nullable(&schema)?);

let input = BooleanArray::from(vec![Some(true), None, Some(false)]);
let expected = &BooleanArray::from(vec![Some(false), None, Some(true)]);
3 changes: 1 addition & 2 deletions datafusion/src/physical_plan/hash_join.rs
@@ -684,11 +684,10 @@ fn build_join_indexes(
&keys_values,
)? {
left_indices.append_value(i)?;
right_indices.append_value(row as u32)?;
} else {
left_indices.append_null()?;
right_indices.append_value(row as u32)?;
}
right_indices.append_value(row as u32)?;
}
}
None => {
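The `build_join_indexes` change above hoists `right_indices.append_value(row as u32)?` out of the `if`/`else`, since both branches performed it; only the left-side append differs. A simplified sketch of the same restructuring, using plain `Vec`s in place of the Arrow array builders:

```rust
// Simplified sketch of the hash_join restructuring: the probe-side index is
// appended once per row, after the branch, instead of in both branches.
fn collect_indices(matches: &[Option<u32>]) -> (Vec<Option<u32>>, Vec<u32>) {
    let mut left_indices = Vec::new();
    let mut right_indices = Vec::new();

    for (row, matched) in matches.iter().enumerate() {
        if let Some(left_row) = matched {
            left_indices.push(Some(*left_row)); // matching build-side index
        } else {
            left_indices.push(None); // no match: null on the build side
        }
        // Previously duplicated in both branches; now a single append per row.
        right_indices.push(row as u32);
    }

    (left_indices, right_indices)
}

fn main() {
    let (left, right) = collect_indices(&[Some(5), None, Some(7)]);
    assert_eq!(left, vec![Some(5), None, Some(7)]);
    assert_eq!(right, vec![0, 1, 2]);
}
```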
2 changes: 1 addition & 1 deletion datafusion/src/physical_plan/regex_expressions.rs
@@ -62,7 +62,7 @@ pub fn regexp_match<T: StringOffsetSizeTrait>(args: &[ArrayRef]) -> Result<Array
/// used by regexp_replace
fn regex_replace_posix_groups(replacement: &str) -> String {
lazy_static! {
static ref CAPTURE_GROUPS_RE: Regex = Regex::new("(\\\\)(\\d*)").unwrap();
static ref CAPTURE_GROUPS_RE: Regex = Regex::new(r"(\\)(\d*)").unwrap();
}
CAPTURE_GROUPS_RE
.replace_all(replacement, "$${$2}")
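The regex_expressions.rs change is the same raw-string cleanup: both literals spell the pattern `(\\)(\d*)`, a captured literal backslash followed by captured optional digits. A quick check of the equivalence:

```rust
fn main() {
    // Both literals denote the same regex source text: (\\)(\d*)
    let escaped = "(\\\\)(\\d*)";
    let raw = r"(\\)(\d*)";
    assert_eq!(escaped, raw);
}
```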
4 changes: 2 additions & 2 deletions datafusion/src/scalar.rs
@@ -1123,7 +1123,7 @@ mod tests {
let array = value.to_array();
let array = array.as_any().downcast_ref::<UInt64Array>().unwrap();
assert_eq!(array.len(), 1);
assert_eq!(false, array.is_null(0));
assert!(!array.is_null(0));
assert_eq!(array.value(0), 13);

let value = ScalarValue::UInt64(None);
@@ -1139,7 +1139,7 @@ mod tests {
let array = value.to_array();
let array = array.as_any().downcast_ref::<UInt32Array>().unwrap();
assert_eq!(array.len(), 1);
assert_eq!(false, array.is_null(0));
assert!(!array.is_null(0));
assert_eq!(array.value(0), 13);

let value = ScalarValue::UInt32(None);
32 changes: 16 additions & 16 deletions datafusion/src/sql/planner.rs
@@ -1653,7 +1653,7 @@ mod tests {
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
format!(
"Plan(\"Invalid identifier \\\'doesnotexist\\\' for schema {}\")",
r#"Plan("Invalid identifier 'doesnotexist' for schema {}")"#,
PERSON_COLUMN_NAMES
),
format!("{:?}", err)
@@ -1665,7 +1665,7 @@
let sql = "SELECT age, age FROM person";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projections require unique expression names but the expression \\\"#age\\\" at position 0 and \\\"#age\\\" at position 1 have the same name. Consider aliasing (\\\"AS\\\") one of them.\")",
r##"Plan("Projections require unique expression names but the expression \"#age\" at position 0 and \"#age\" at position 1 have the same name. Consider aliasing (\"AS\") one of them.")"##,
format!("{:?}", err)
);
}
@@ -1675,7 +1675,7 @@
let sql = "SELECT *, age FROM person";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projections require unique expression names but the expression \\\"#age\\\" at position 3 and \\\"#age\\\" at position 8 have the same name. Consider aliasing (\\\"AS\\\") one of them.\")",
r##"Plan("Projections require unique expression names but the expression \"#age\" at position 3 and \"#age\" at position 8 have the same name. Consider aliasing (\"AS\") one of them.")"##,
format!("{:?}", err)
);
}
@@ -1714,7 +1714,7 @@ mod tests {
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
format!(
"Plan(\"Invalid identifier \\\'doesnotexist\\\' for schema {}\")",
r#"Plan("Invalid identifier 'doesnotexist' for schema {}")"#,
PERSON_COLUMN_NAMES
),
format!("{:?}", err)
@@ -1727,7 +1727,7 @@
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
format!(
"Plan(\"Invalid identifier \\\'x\\\' for schema {}\")",
r#"Plan("Invalid identifier 'x' for schema {}")"#,
PERSON_COLUMN_NAMES
),
format!("{:?}", err)
@@ -2200,7 +2200,7 @@ mod tests {
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
format!(
"Plan(\"Invalid identifier \\\'doesnotexist\\\' for schema {}\")",
r#"Plan("Invalid identifier 'doesnotexist' for schema {}")"#,
PERSON_COLUMN_NAMES
),
format!("{:?}", err)
@@ -2212,7 +2212,7 @@
let sql = "SELECT MIN(age), MIN(age) FROM person";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projections require unique expression names but the expression \\\"#MIN(age)\\\" at position 0 and \\\"#MIN(age)\\\" at position 1 have the same name. Consider aliasing (\\\"AS\\\") one of them.\")",
r##"Plan("Projections require unique expression names but the expression \"#MIN(age)\" at position 0 and \"#MIN(age)\" at position 1 have the same name. Consider aliasing (\"AS\") one of them.")"##,
format!("{:?}", err)
);
}
@@ -2242,7 +2242,7 @@ mod tests {
let sql = "SELECT MIN(age) AS a, MIN(age) AS a FROM person";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projections require unique expression names but the expression \\\"#MIN(age) AS a\\\" at position 0 and \\\"#MIN(age) AS a\\\" at position 1 have the same name. Consider aliasing (\\\"AS\\\") one of them.\")",
r##"Plan("Projections require unique expression names but the expression \"#MIN(age) AS a\" at position 0 and \"#MIN(age) AS a\" at position 1 have the same name. Consider aliasing (\"AS\") one of them.")"##,
format!("{:?}", err)
);
}
@@ -2272,7 +2272,7 @@ mod tests {
let sql = "SELECT state AS a, MIN(age) AS a FROM person GROUP BY state";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projections require unique expression names but the expression \\\"#state AS a\\\" at position 0 and \\\"#MIN(age) AS a\\\" at position 1 have the same name. Consider aliasing (\\\"AS\\\") one of them.\")",
r##"Plan("Projections require unique expression names but the expression \"#state AS a\" at position 0 and \"#MIN(age) AS a\" at position 1 have the same name. Consider aliasing (\"AS\") one of them.")"##,
format!("{:?}", err)
);
}
@@ -2293,7 +2293,7 @@
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
format!(
"Plan(\"Invalid identifier \\\'doesnotexist\\\' for schema {}\")",
r#"Plan("Invalid identifier 'doesnotexist' for schema {}")"#,
PERSON_COLUMN_NAMES
),
format!("{:?}", err)
@@ -2306,7 +2306,7 @@
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
format!(
"Plan(\"Invalid identifier \\\'doesnotexist\\\' for schema {}\")",
r#"Plan("Invalid identifier 'doesnotexist' for schema {}")"#,
PERSON_COLUMN_NAMES
),
format!("{:?}", err)
@@ -2318,7 +2318,7 @@
let sql = "SELECT INTERVAL '100000000000000000 day'";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"NotImplemented(\"Interval field value out of range: \\\"100000000000000000 day\\\"\")",
r#"NotImplemented("Interval field value out of range: \"100000000000000000 day\"")"#,
format!("{:?}", err)
);
}
@@ -2328,7 +2328,7 @@
let sql = "SELECT INTERVAL '1 year 1 day'";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"NotImplemented(\"DF does not support intervals that have both a Year/Month part as well as Days/Hours/Mins/Seconds: \\\"1 year 1 day\\\". Hint: try breaking the interval into two parts, one with Year/Month and the other with Days/Hours/Mins/Seconds - e.g. (NOW() + INTERVAL \\\'1 year\\\') + INTERVAL \\\'1 day\\\'\")",
r#"NotImplemented("DF does not support intervals that have both a Year/Month part as well as Days/Hours/Mins/Seconds: \"1 year 1 day\". Hint: try breaking the interval into two parts, one with Year/Month and the other with Days/Hours/Mins/Seconds - e.g. (NOW() + INTERVAL '1 year') + INTERVAL '1 day'")"#,
format!("{:?}", err)
);
}
@@ -2391,7 +2391,7 @@ mod tests {
let sql = "SELECT state, MIN(age), MIN(age) FROM person GROUP BY state";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projections require unique expression names but the expression \\\"#MIN(age)\\\" at position 1 and \\\"#MIN(age)\\\" at position 2 have the same name. Consider aliasing (\\\"AS\\\") one of them.\")",
r##"Plan("Projections require unique expression names but the expression \"#MIN(age)\" at position 1 and \"#MIN(age)\" at position 2 have the same name. Consider aliasing (\"AS\") one of them.")"##,
format!("{:?}", err)
);
}
@@ -2451,7 +2451,7 @@ mod tests {
"SELECT ((age + 1) / 2) * (age + 9), MIN(first_name) FROM person GROUP BY age + 1";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projection references non-aggregate values\")",
r#"Plan("Projection references non-aggregate values")"#,
format!("{:?}", err)
);
}
@@ -2462,7 +2462,7 @@
let sql = "SELECT age, MIN(first_name) FROM person GROUP BY age + 1";
let err = logical_plan(sql).expect_err("query should have failed");
assert_eq!(
"Plan(\"Projection references non-aggregate values\")",
r#"Plan("Projection references non-aggregate values")"#,
format!("{:?}", err)
);
}
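The planner test changes replace heavily escaped expected strings with raw string literals: `r#"…"#` lets double quotes appear unescaped, and `r##"…"##` is used where the expected text itself contains the sequence `"#` (the `\"#age\"` fragments), which would end a single-`#` raw string early. A standalone check of both equivalences:

```rust
fn main() {
    // `r#"…"#` keeps the expected Debug strings readable: double quotes need
    // no escaping and backslashes are taken literally.
    let escaped = "Plan(\"Projection references non-aggregate values\")";
    let raw = r#"Plan("Projection references non-aggregate values")"#;
    assert_eq!(escaped, raw);

    // `r##"…"##` handles text containing the sequence `"#`, which would
    // otherwise terminate a single-`#` raw string early.
    let with_hash = r##"expression \"#age\" at position 0"##;
    assert_eq!(with_hash, "expression \\\"#age\\\" at position 0");
}
```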