Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

LogicalPlanBuilder now uses TableSource instead of TableProvider #2569

Merged
merged 6 commits into from
May 21, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions ballista/rust/core/src/serde/logical_plan/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@ use datafusion::logical_plan::plan::{
Aggregate, EmptyRelation, Filter, Join, Projection, Sort, SubqueryAlias, Window,
};
use datafusion::logical_plan::{
source_as_provider, Column, CreateCatalog, CreateCatalogSchema, CreateExternalTable,
CreateView, CrossJoin, Expr, JoinConstraint, Limit, LogicalPlan, LogicalPlanBuilder,
Offset, Repartition, TableScan, Values,
provider_as_source, source_as_provider, Column, CreateCatalog, CreateCatalogSchema,
CreateExternalTable, CreateView, CrossJoin, Expr, JoinConstraint, Limit, LogicalPlan,
LogicalPlanBuilder, Offset, Repartition, TableScan, Values,
};
use datafusion::prelude::SessionContext;

Expand Down Expand Up @@ -252,7 +252,7 @@ impl AsLogicalPlan for LogicalPlanNode {

LogicalPlanBuilder::scan_with_filters(
&scan.table_name,
Arc::new(provider),
provider_as_source(Arc::new(provider)),
projection,
filters,
)?
Expand Down
16 changes: 10 additions & 6 deletions datafusion-examples/examples/custom_datasource.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ use datafusion::dataframe::DataFrame;
use datafusion::datasource::{TableProvider, TableType};
use datafusion::error::Result;
use datafusion::execution::context::TaskContext;
use datafusion::logical_plan::{Expr, LogicalPlanBuilder};
use datafusion::logical_plan::{provider_as_source, Expr, LogicalPlanBuilder};
use datafusion::physical_plan::expressions::PhysicalSortExpr;
use datafusion::physical_plan::memory::MemoryStream;
use datafusion::physical_plan::{
Expand Down Expand Up @@ -60,11 +60,15 @@ async fn search_accounts(
let ctx = SessionContext::new();

// create logical plan composed of a single TableScan
let logical_plan =
LogicalPlanBuilder::scan_with_filters("accounts", Arc::new(db), None, vec![])
.unwrap()
.build()
.unwrap();
let logical_plan = LogicalPlanBuilder::scan_with_filters(
"accounts",
provider_as_source(Arc::new(db)),
None,
vec![],
)
.unwrap()
.build()
.unwrap();

let mut dataframe = DataFrame::new(ctx.state, &logical_plan)
.select_columns(&["id", "bank_account"])?;
Expand Down
15 changes: 9 additions & 6 deletions datafusion/core/src/execution/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,9 +61,9 @@ use crate::datasource::listing::ListingTableConfig;
use crate::datasource::TableProvider;
use crate::error::{DataFusionError, Result};
use crate::logical_plan::{
CreateCatalog, CreateCatalogSchema, CreateExternalTable, CreateMemoryTable,
CreateView, DropTable, FileType, FunctionRegistry, LogicalPlan, LogicalPlanBuilder,
UNNAMED_TABLE,
provider_as_source, CreateCatalog, CreateCatalogSchema, CreateExternalTable,
CreateMemoryTable, CreateView, DropTable, FileType, FunctionRegistry, LogicalPlan,
LogicalPlanBuilder, UNNAMED_TABLE,
};
use crate::optimizer::common_subexpr_eliminate::CommonSubexprEliminate;
use crate::optimizer::filter_push_down::FilterPushDown;
Expand Down Expand Up @@ -586,7 +586,9 @@ impl SessionContext {
.with_schema(resolved_schema);
let provider = ListingTable::try_new(config)?;

let plan = LogicalPlanBuilder::scan(path, Arc::new(provider), None)?.build()?;
let plan =
LogicalPlanBuilder::scan(path, provider_as_source(Arc::new(provider)), None)?
.build()?;
Ok(Arc::new(DataFrame::new(self.state.clone(), &plan)))
}

Expand Down Expand Up @@ -620,7 +622,8 @@ impl SessionContext {
pub fn read_table(&self, provider: Arc<dyn TableProvider>) -> Result<Arc<DataFrame>> {
Ok(Arc::new(DataFrame::new(
self.state.clone(),
&LogicalPlanBuilder::scan(UNNAMED_TABLE, provider, None)?.build()?,
&LogicalPlanBuilder::scan(UNNAMED_TABLE, provider_as_source(provider), None)?
.build()?,
)))
}

Expand Down Expand Up @@ -817,7 +820,7 @@ impl SessionContext {
Some(ref provider) => {
let plan = LogicalPlanBuilder::scan(
table_ref.table(),
Arc::clone(provider),
provider_as_source(Arc::clone(provider)),
None,
)?
.build()?;
Expand Down
16 changes: 8 additions & 8 deletions datafusion/core/src/logical_plan/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

//! This module provides a builder for creating LogicalPlans

use crate::datasource::TableProvider;
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was the main objective - removing one of the final dependencies from LogicalPlanBuilder to the "core" datafusion crate.

use crate::error::{DataFusionError, Result};
use crate::logical_expr::ExprSchemable;
use crate::logical_plan::plan::{
Expand All @@ -41,11 +40,12 @@ use std::{
use super::{Expr, JoinConstraint, JoinType, LogicalPlan, PlanType};
use crate::logical_plan::{
columnize_expr, exprlist_to_fields, normalize_col, normalize_cols,
provider_as_source, rewrite_sort_cols_by_aggs, Column, CrossJoin, DFField, DFSchema,
DFSchemaRef, Limit, Offset, Partitioning, Repartition, Values,
rewrite_sort_cols_by_aggs, Column, CrossJoin, DFField, DFSchema, DFSchemaRef, Limit,
Offset, Partitioning, Repartition, Values,
};

use datafusion_common::ToDFSchema;
use datafusion_expr::TableSource;

/// Default table name for unnamed table
pub const UNNAMED_TABLE: &str = "?table?";
Expand Down Expand Up @@ -191,16 +191,16 @@ impl LogicalPlanBuilder {
/// Convert a table provider into a builder with a TableScan
pub fn scan(
table_name: impl Into<String>,
provider: Arc<dyn TableProvider>,
table_source: Arc<dyn TableSource>,
projection: Option<Vec<usize>>,
) -> Result<Self> {
Self::scan_with_filters(table_name, provider, projection, vec![])
Self::scan_with_filters(table_name, table_source, projection, vec![])
}

/// Convert a table provider into a builder with a TableScan
pub fn scan_with_filters(
table_name: impl Into<String>,
provider: Arc<dyn TableProvider>,
table_source: Arc<dyn TableSource>,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I wonder if we could make the APIs a little nicer (avoiding having to call provider_as_source) with something like

Untested:

table_source: impl Into<Arc<dyn TableSource>>

Or add some other trait that would allow us to pass both Arc<dyn TableSource> as well as Arc<dyn TableProvider>

I think we can always refine the API in a follow on PR

projection: Option<Vec<usize>>,
filters: Vec<Expr>,
) -> Result<Self> {
Expand All @@ -212,7 +212,7 @@ impl LogicalPlanBuilder {
));
}

let schema = provider.schema();
let schema = table_source.schema();

let projected_schema = projection
.as_ref()
Expand All @@ -232,7 +232,7 @@ impl LogicalPlanBuilder {

let table_scan = LogicalPlan::TableScan(TableScan {
table_name,
source: provider_as_source(provider),
source: table_source,
projected_schema: Arc::new(projected_schema),
projection,
filters,
Expand Down
16 changes: 10 additions & 6 deletions datafusion/core/src/sql/planner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,11 @@ use crate::datasource::TableProvider;
use crate::logical_plan::window_frames::{WindowFrame, WindowFrameUnits};
use crate::logical_plan::Expr::Alias;
use crate::logical_plan::{
and, col, lit, normalize_col, normalize_col_with_schemas, Column, CreateCatalog,
CreateCatalogSchema, CreateExternalTable as PlanCreateExternalTable,
CreateMemoryTable, CreateView, DFSchema, DFSchemaRef, DropTable, Expr, FileType,
LogicalPlan, LogicalPlanBuilder, Operator, PlanType, ToDFSchema, ToStringifiedPlan,
and, col, lit, normalize_col, normalize_col_with_schemas, provider_as_source,
union_with_alias, Column, CreateCatalog, CreateCatalogSchema,
CreateExternalTable as PlanCreateExternalTable, CreateMemoryTable, CreateView,
DFSchema, DFSchemaRef, DropTable, Expr, FileType, LogicalPlan, LogicalPlanBuilder,
Operator, PlanType, ToDFSchema, ToStringifiedPlan,
};
use crate::prelude::JoinType;
use crate::scalar::ScalarValue;
Expand Down Expand Up @@ -713,8 +714,11 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
_ => Ok(cte_plan.clone()),
},
(_, Ok(provider)) => {
let scan =
LogicalPlanBuilder::scan(&table_name, provider, None);
let scan = LogicalPlanBuilder::scan(
&table_name,
provider_as_source(provider),
None,
);
let scan = match table_alias.as_ref() {
Some(ref name) => scan?.alias(name.to_owned().as_str()),
_ => scan,
Expand Down
8 changes: 6 additions & 2 deletions datafusion/core/src/test_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ use std::collections::BTreeMap;
use std::{env, error::Error, path::PathBuf, sync::Arc};

use crate::datasource::empty::EmptyTable;
use crate::logical_plan::{LogicalPlanBuilder, UNNAMED_TABLE};
use crate::logical_plan::{provider_as_source, LogicalPlanBuilder, UNNAMED_TABLE};
use arrow::datatypes::{DataType, Field, Schema, SchemaRef};
use datafusion_common::DataFusionError;

Expand Down Expand Up @@ -243,7 +243,11 @@ pub fn scan_empty(
) -> Result<LogicalPlanBuilder, DataFusionError> {
let table_schema = Arc::new(table_schema.clone());
let provider = Arc::new(EmptyTable::new(table_schema));
LogicalPlanBuilder::scan(name.unwrap_or(UNNAMED_TABLE), provider, projection)
LogicalPlanBuilder::scan(
name.unwrap_or(UNNAMED_TABLE),
provider_as_source(provider),
projection,
)
}

/// Get the schema for the aggregate_test_* csv files
Expand Down
17 changes: 11 additions & 6 deletions datafusion/core/tests/parquet_pruning.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ use arrow::{
util::pretty::pretty_format_batches,
};
use chrono::{Datelike, Duration};
use datafusion::logical_plan::provider_as_source;
use datafusion::{
datasource::TableProvider,
logical_plan::{col, lit, Expr, LogicalPlan, LogicalPlanBuilder},
Expand Down Expand Up @@ -544,12 +545,16 @@ impl ContextWithParquet {
/// the number of output rows and normalized execution metrics
async fn query_with_expr(&mut self, expr: Expr) -> TestOutput {
let sql = format!("EXPR only: {:?}", expr);
let logical_plan = LogicalPlanBuilder::scan("t", self.provider.clone(), None)
.unwrap()
.filter(expr)
.unwrap()
.build()
.unwrap();
let logical_plan = LogicalPlanBuilder::scan(
"t",
provider_as_source(self.provider.clone()),
None,
)
.unwrap()
.filter(expr)
.unwrap()
.build()
.unwrap();
self.run_test(logical_plan, sql).await
}

Expand Down
9 changes: 5 additions & 4 deletions datafusion/core/tests/sql/projection.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.

use datafusion::logical_plan::{LogicalPlanBuilder, UNNAMED_TABLE};
use datafusion::logical_plan::{provider_as_source, LogicalPlanBuilder, UNNAMED_TABLE};
use datafusion::test_util::scan_empty;
use tempfile::TempDir;

Expand Down Expand Up @@ -239,9 +239,10 @@ async fn projection_on_memory_scan() -> Result<()> {
)?]];

let provider = Arc::new(MemTable::try_new(schema, partitions)?);
let plan = LogicalPlanBuilder::scan(UNNAMED_TABLE, provider, None)?
.project(vec![col("b")])?
.build()?;
let plan =
LogicalPlanBuilder::scan(UNNAMED_TABLE, provider_as_source(provider), None)?
.project(vec![col("b")])?
.build()?;
assert_fields_eq(&plan, vec!["b"]);

let ctx = SessionContext::new();
Expand Down