Skip to content

Commit

Permalink
Remove #[allow(dead_code)] from the codebase (#421)
Browse files Browse the repository at this point in the history
* Remove #[allow(dead_code)] from the codebase

* Remove: dead_code, move: avroschema fn to test

* Fix checks and code style, remove unused code

* Change function name
  • Loading branch information
vivek378521 authored Jun 28, 2024
1 parent 0d6fae0 commit 8955e7b
Show file tree
Hide file tree
Showing 7 changed files with 70 additions and 144 deletions.
1 change: 0 additions & 1 deletion crates/iceberg/src/avro/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,5 @@
// under the License.

//! Avro-related code.
#[allow(dead_code)]
mod schema;
pub(crate) use schema::*;
115 changes: 59 additions & 56 deletions crates/iceberg/src/avro/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -293,39 +293,6 @@ pub(crate) trait AvroSchemaVisitor {
fn primitive(&mut self, schema: &AvroSchema) -> Result<Self::T>;
}

/// Walks an Avro schema depth-first, invoking the matching `visitor`
/// callback for every node in post order (children before their parent).
pub(crate) fn visit<V: AvroSchemaVisitor>(schema: &AvroSchema, visitor: &mut V) -> Result<V::T> {
    match schema {
        AvroSchema::Record(record) => {
            // Visit every field first, then hand the collected results to
            // the record callback. Any child error short-circuits the walk.
            let mut children = Vec::with_capacity(record.fields.len());
            for field in &record.fields {
                children.push(visit(&field.schema, visitor)?);
            }
            visitor.record(record, children)
        }
        AvroSchema::Union(union) => {
            let mut branches = Vec::with_capacity(union.variants().len());
            for variant in union.variants() {
                branches.push(visit(variant, visitor)?);
            }
            visitor.union(union, branches)
        }
        AvroSchema::Array(element) => {
            let element_result = visit(element, visitor)?;
            visitor.array(schema, element_result)
        }
        AvroSchema::Map(value) => {
            let value_result = visit(value, visitor)?;
            visitor.map(schema, value_result)
        }
        // Everything else (int, string, fixed, …) is a leaf.
        other => visitor.primitive(other),
    }
}

struct AvroSchemaToSchema {
next_id: i32,
}
Expand Down Expand Up @@ -496,29 +463,6 @@ impl AvroSchemaVisitor for AvroSchemaToSchema {
}
}

/// Converts avro schema to iceberg schema.
///
/// Only an Avro record schema can form a top-level Iceberg schema; its
/// converted struct fields become the schema's fields. Any other Avro
/// schema kind is rejected with `ErrorKind::DataInvalid`.
pub(crate) fn avro_schema_to_schema(avro_schema: &AvroSchema) -> Result<Schema> {
    if let AvroSchema::Record(_) = avro_schema {
        let mut converter = AvroSchemaToSchema { next_id: 0 };
        // The visitor returns Option<Type>; a record input always yields Some.
        let typ = visit(avro_schema, &mut converter)?.expect("Iceberg schema should not be none.");
        if let Type::Struct(s) = typ {
            Schema::builder()
                .with_fields(s.fields().iter().cloned())
                .build()
        } else {
            Err(Error::new(
                ErrorKind::Unexpected,
                format!("Expected to convert avro record schema to struct type, but {typ}"),
            ))
        }
    } else {
        Err(Error::new(
            ErrorKind::DataInvalid,
            // Bug fix: this was a plain string literal, so `{avro_schema}`
            // was emitted verbatim instead of being interpolated.
            format!("Can't convert non record avro schema to iceberg schema: {avro_schema}"),
        ))
    }
}

#[cfg(test)]
mod tests {
use super::*;
Expand All @@ -528,6 +472,65 @@ mod tests {
use apache_avro::Schema as AvroSchema;
use std::fs::read_to_string;

/// Walks an Avro schema depth-first, invoking the matching `visitor`
/// callback for every node in post order (children before their parent).
pub(crate) fn visit<V: AvroSchemaVisitor>(
    schema: &AvroSchema,
    visitor: &mut V,
) -> Result<V::T> {
    match schema {
        AvroSchema::Record(record) => {
            // Visit every field first, then hand the collected results to
            // the record callback. Any child error short-circuits the walk.
            let mut children = Vec::with_capacity(record.fields.len());
            for field in &record.fields {
                children.push(visit(&field.schema, visitor)?);
            }
            visitor.record(record, children)
        }
        AvroSchema::Union(union) => {
            let mut branches = Vec::with_capacity(union.variants().len());
            for variant in union.variants() {
                branches.push(visit(variant, visitor)?);
            }
            visitor.union(union, branches)
        }
        AvroSchema::Array(element) => {
            let element_result = visit(element, visitor)?;
            visitor.array(schema, element_result)
        }
        AvroSchema::Map(value) => {
            let value_result = visit(value, visitor)?;
            visitor.map(schema, value_result)
        }
        // Everything else (int, string, fixed, …) is a leaf.
        other => visitor.primitive(other),
    }
}
/// Converts avro schema to iceberg schema.
///
/// Only an Avro record schema can form a top-level Iceberg schema; its
/// converted struct fields become the schema's fields. Any other Avro
/// schema kind is rejected with `ErrorKind::DataInvalid`.
pub(crate) fn avro_schema_to_schema(avro_schema: &AvroSchema) -> Result<Schema> {
    if let AvroSchema::Record(_) = avro_schema {
        let mut converter = AvroSchemaToSchema { next_id: 0 };
        // The visitor returns Option<Type>; a record input always yields Some.
        let typ =
            visit(avro_schema, &mut converter)?.expect("Iceberg schema should not be none.");
        if let Type::Struct(s) = typ {
            Schema::builder()
                .with_fields(s.fields().iter().cloned())
                .build()
        } else {
            Err(Error::new(
                ErrorKind::Unexpected,
                format!("Expected to convert avro record schema to struct type, but {typ}"),
            ))
        }
    } else {
        Err(Error::new(
            ErrorKind::DataInvalid,
            // Bug fix: this was a plain string literal, so `{avro_schema}`
            // was emitted verbatim instead of being interpolated.
            format!("Can't convert non record avro schema to iceberg schema: {avro_schema}"),
        ))
    }
}

fn read_test_data_file_to_avro_schema(filename: &str) -> AvroSchema {
let input = read_to_string(format!(
"{}/testdata/{}",
Expand Down
11 changes: 3 additions & 8 deletions crates/iceberg/src/expr/visitors/expression_evaluator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ impl ExpressionEvaluator {
/// to see if this [`DataFile`] could possibly contain data that matches
/// the scan's filter.
pub(crate) fn eval(&self, data_file: &DataFile) -> Result<bool> {
let mut visitor = ExpressionEvaluatorVisitor::new(self, data_file.partition());
let mut visitor = ExpressionEvaluatorVisitor::new(data_file.partition());

visit(&mut visitor, &self.partition_filter)
}
Expand All @@ -58,19 +58,14 @@ impl ExpressionEvaluator {
/// specifically for data file partitions.
#[derive(Debug)]
struct ExpressionEvaluatorVisitor<'a> {
/// Reference to an [`ExpressionEvaluator`].
expression_evaluator: &'a ExpressionEvaluator,
/// Reference to a [`DataFile`]'s partition [`Struct`].
partition: &'a Struct,
}

impl<'a> ExpressionEvaluatorVisitor<'a> {
/// Creates a new [`ExpressionEvaluatorVisitor`].
fn new(expression_evaluator: &'a ExpressionEvaluator, partition: &'a Struct) -> Self {
Self {
expression_evaluator,
partition,
}
fn new(partition: &'a Struct) -> Self {
Self { partition }
}
}

Expand Down
69 changes: 0 additions & 69 deletions crates/iceberg/src/expr/visitors/inclusive_metrics_evaluator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1206,64 +1206,6 @@ mod test {
assert!(result, "Should read: id above upper bound");
}

fn test_case_insensitive_integer_not_eq_rewritten() {
    // Each case pairs the literal compared against the upper-cased "ID"
    // column (bound case-insensitively) with the reason the data file must
    // still be read. Order matches the original straight-line assertions.
    let cases: [(i32, &str); 7] = [
        (INT_MIN_VALUE - 25, "Should read: id below lower bound"),
        (INT_MIN_VALUE - 1, "Should read: id below lower bound"),
        (INT_MIN_VALUE, "Should read: id equal to lower bound"),
        (INT_MAX_VALUE - 4, "Should read: id between lower and upper bound"),
        (INT_MAX_VALUE, "Should read: id equal to upper bound"),
        (INT_MAX_VALUE + 1, "Should read: id above upper bound"),
        (INT_MAX_VALUE + 6, "Should read: id above upper bound"),
    ];

    for (literal, reason) in cases {
        let result = InclusiveMetricsEvaluator::eval(
            &equal_int_not_case_insensitive("ID", literal),
            &get_test_file_1(),
            true,
        )
        .unwrap();
        assert!(result, "{}", reason);
    }
}

#[test]
#[should_panic]
fn test_case_sensitive_integer_not_eq_rewritten() {
Expand Down Expand Up @@ -1882,17 +1824,6 @@ mod test {
filter.bind(schema.clone(), true).unwrap()
}

/// Builds a bound `NOT (ref == literal)` predicate, binding against the
/// test schema with case-insensitive name matching (`case_sensitive = false`).
fn equal_int_not_case_insensitive(reference: &str, int_literal: i32) -> BoundPredicate {
    let schema = create_test_schema();
    let filter = Predicate::Binary(BinaryExpression::new(
        Eq,
        Reference::new(reference),
        Datum::int(int_literal),
    ))
    .not();
    // `schema` is not used again, so move it into `bind` instead of cloning
    // (removes a redundant clone flagged by clippy::redundant_clone).
    filter.bind(schema, false).unwrap()
}

fn not_equal_int(reference: &str, int_literal: i32) -> BoundPredicate {
let schema = create_test_schema();
let filter = Predicate::Binary(BinaryExpression::new(
Expand Down
10 changes: 3 additions & 7 deletions crates/iceberg/src/expr/visitors/manifest_evaluator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,23 +46,19 @@ impl ManifestEvaluator {
return Ok(true);
}

let mut evaluator = ManifestFilterVisitor::new(self, &manifest_file.partitions);
let mut evaluator = ManifestFilterVisitor::new(&manifest_file.partitions);

visit(&mut evaluator, &self.partition_filter)
}
}

struct ManifestFilterVisitor<'a> {
manifest_evaluator: &'a ManifestEvaluator,
partitions: &'a Vec<FieldSummary>,
}

impl<'a> ManifestFilterVisitor<'a> {
fn new(manifest_evaluator: &'a ManifestEvaluator, partitions: &'a Vec<FieldSummary>) -> Self {
ManifestFilterVisitor {
manifest_evaluator,
partitions,
}
fn new(partitions: &'a Vec<FieldSummary>) -> Self {
ManifestFilterVisitor { partitions }
}
}

Expand Down
2 changes: 0 additions & 2 deletions crates/iceberg/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ pub use catalog::TableIdent;
pub use catalog::TableRequirement;
pub use catalog::TableUpdate;

#[allow(dead_code)]
pub mod table;

mod avro;
Expand All @@ -47,7 +46,6 @@ pub mod spec;

pub mod scan;

#[allow(dead_code)]
pub mod expr;
pub mod transaction;
pub mod transform;
Expand Down
6 changes: 5 additions & 1 deletion crates/iceberg/src/scan.rs
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,6 @@ impl<'a> TableScanBuilder<'a> {

/// Table scan.
#[derive(Debug)]
#[allow(dead_code)]
pub struct TableScan {
snapshot: SnapshotRef,
table_metadata: TableMetadataRef,
Expand Down Expand Up @@ -346,6 +345,11 @@ impl TableScan {
}
false
}

/// Returns a reference to the column names of the table scan.
pub fn column_names(&self) -> &[String] {
    self.column_names.as_slice()
}
}

/// Holds the context necessary for file scanning operations
Expand Down

0 comments on commit 8955e7b

Please sign in to comment.