From 64909117aa42911bb9a3d5a279e4e0c6e0ab0412 Mon Sep 17 00:00:00 2001 From: Aurelius84 Date: Mon, 8 Apr 2024 12:47:25 +0800 Subject: [PATCH 1/5] [PIR+CINN]Fix parallel compilation value symbolic update --- .../lower_cinn_fusion_op_pass.cc | 2 + .../transforms/lowering_pass/utils.cc | 37 ++++++------ paddle/cinn/hlir/framework/pir/group_info.h | 40 +++++++++++++ .../hlir/framework/pir/op_lowering_group.cc | 19 ++++++ .../hlir/framework/pir/op_lowering_group.h | 58 +++++++++---------- paddle/cinn/hlir/framework/pir/utils.cc | 8 +-- 6 files changed, 112 insertions(+), 52 deletions(-) create mode 100644 paddle/cinn/hlir/framework/pir/group_info.h diff --git a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc index 0e7ebb8e9499d..1952d25880802 100644 --- a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc +++ b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc @@ -34,6 +34,8 @@ pir::Operation* ProcessDyShapeGroup( const OpLoweringGroupPtr& group, pir::ShapeConstraintIRAnalysis& shape_analysis, // NOLINT pir::PatternRewriter& rewriter) { // NOLINT + // NOTE(dev): Need UpdateShapeOrDataExprs firstly. 
+ group->UpdateShapeOrDataExprs(); auto group_inputs = GetBlockOutsideInput(group->ops()); GroupDimExprInfo group_dim_expr_info = GetGroupDimExprInfo(group); const auto& leaves = group_dim_expr_info.all_value_dim_exprs; diff --git a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc index e4724c617dfaf..c8d4f435e372b 100644 --- a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc +++ b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc @@ -88,33 +88,36 @@ std::unordered_map GetJitKernelAttr( OpLoweringGroupPtr BuildOpLoweringGroup(pir::Operation* fusion_op_ptr) { auto fusion_op = fusion_op_ptr->dyn_cast(); - auto group = std::make_shared(); - group->set_op_pattern_kind( - cinn::hlir::framework::OpPatternKind::kElementWise); + std::vector<::pir::Operation*> ops; + auto group_op_kind = cinn::hlir::framework::OpPatternKind::kElementWise; + // Rebuild ops of the group + for (auto op : fusion_op.GetOperators()) { + if (!op->isa<::pir::YieldOp>()) { + ops.push_back(op); + group_op_kind = static_cast(CompatibleInfo::OpKind(*op)) > + static_cast(group_op_kind) + ? CompatibleInfo::OpKind(*op) + : group_op_kind; + } + } + + auto group = std::make_shared(ops); + if (fusion_op.attributes().count("group_info")) { auto attr = fusion_op.attribute("group_info") .dyn_cast() .data(); - group->set_op_pattern_kind(attr.op_pattern_kind); + group_op_kind = + static_cast(attr.op_pattern_kind) > static_cast(group_op_kind) + ? 
attr.op_pattern_kind + : group_op_kind; group->set_loop_ranges(attr.loop_ranges); group->set_loop_ranges_expr(attr.loop_ranges_expr); - group->set_reduce_axis(attr.reduce_axis); group->set_alignment_schedule_info(attr.alignment_schedule_info); } - - // Rebuild ops of the group - for (auto op : fusion_op.GetOperators()) { - if (!op->isa<::pir::YieldOp>()) { - group->mut_ops().push_back(op); - auto op_pattern_kind = static_cast(CompatibleInfo::OpKind(*op)) > - static_cast(group->op_pattern_kind()) - ? CompatibleInfo::OpKind(*op) - : group->op_pattern_kind(); - group->set_op_pattern_kind(op_pattern_kind); - } - } + group->set_op_pattern_kind(group_op_kind); // Rebuild output_ops and input_ops of the group auto yield_op = fusion_op.GetOperators().back(); diff --git a/paddle/cinn/hlir/framework/pir/group_info.h b/paddle/cinn/hlir/framework/pir/group_info.h new file mode 100644 index 0000000000000..5dd2a631fb9ea --- /dev/null +++ b/paddle/cinn/hlir/framework/pir/group_info.h @@ -0,0 +1,40 @@ +// Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once +#include +#include + +namespace cinn::hlir::framework::pir { +class OpLoweringGroup; + +class OperationInfo {}; + +class GroupInfo { + public: + explicit GroupInfo(const std::shared_ptr& group); +}; + +std::ostream& operator<<(std::ostream& os, const GroupInfo& group_info); + +} // namespace cinn::hlir::framework::pir + +namespace std { +template <> +struct hash { + std::size_t operator()( + const cinn::hlir::framework::pir::GroupInfo& obj) const; +}; + +} // namespace std diff --git a/paddle/cinn/hlir/framework/pir/op_lowering_group.cc b/paddle/cinn/hlir/framework/pir/op_lowering_group.cc index 8799c84969a04..59cb4460bbb1c 100644 --- a/paddle/cinn/hlir/framework/pir/op_lowering_group.cc +++ b/paddle/cinn/hlir/framework/pir/op_lowering_group.cc @@ -19,6 +19,25 @@ namespace hlir { namespace framework { namespace pir { +void OpLoweringGroup::UpdateShapeOrDataExprs() { + const auto& input_values = this->GetInputOpValues(); + if (input_values.size() == 0) return; + const auto& UpdateDimExprs = + [&](const ::pir::Value& value) -> decltype(auto) { + auto* program = value.defining_op()->GetParentProgram(); + auto& shape_analysis = ::pir::ShapeAnalysisManager::Instance().Get(program); + if (!shape_analysis.HasShapeOrDataForValue(value)) return; + VLOG(6) << "UpdateShapeOrDataExprs for value_id: " << value.impl(); + this->SetShapeOrDataExprs(value, + shape_analysis.GetShapeOrDataForValue(value)); + }; + + for (const auto& value : input_values) { + if (!value || !value.defining_op()) continue; + UpdateDimExprs(value); + } +} + std::shared_ptr OpLoweringGroup::Clone( ::pir::Block* target_block, ::pir::IrMapping* ir_mapping) const { std::vector<::pir::Operation*> new_ops; diff --git a/paddle/cinn/hlir/framework/pir/op_lowering_group.h b/paddle/cinn/hlir/framework/pir/op_lowering_group.h index aaa2f31f0a60c..9639e1c9929fc 100644 --- a/paddle/cinn/hlir/framework/pir/op_lowering_group.h +++ b/paddle/cinn/hlir/framework/pir/op_lowering_group.h @@ -22,6 +22,7 @@ 
#include "paddle/cinn/common/context.h" #include "paddle/cinn/hlir/framework/op.h" #include "paddle/cinn/hlir/framework/pir/utils.h" +#include "paddle/common/enforce.h" #include "paddle/pir/include/core/builtin_type_interfaces.h" #include "paddle/pir/include/core/operation.h" #include "paddle/pir/include/core/value.h" @@ -38,15 +39,18 @@ namespace framework { namespace pir { class OpLoweringGroup { public: - OpLoweringGroup() = default; OpLoweringGroup(const OpLoweringGroup&) = delete; OpLoweringGroup(OpLoweringGroup&&) = delete; explicit OpLoweringGroup(const std::vector<::pir::Operation*>& group_ops) - : ops_(group_ops) {} + : ops_(group_ops) { + fn_name_ = CompatibleInfo::GroupOpsName(ops_); + } explicit OpLoweringGroup(std::initializer_list<::pir::Operation*> group_ops) - : ops_(group_ops) {} + : ops_(group_ops) { + fn_name_ = CompatibleInfo::GroupOpsName(ops_); + } struct SharedGroupHasher { size_t operator()( @@ -88,27 +92,18 @@ class OpLoweringGroup { std::unordered_set<::pir::Value> GetInputOpValues() const { std::unordered_set<::pir::Value> group_inputs; - - std::unordered_set<::pir::Operation*> ops_set; - for (auto op : this->ops_) { - ops_set.insert(op); - } + std::unordered_set<::pir::Operation*> ops_set(this->ops_.begin(), + this->ops_.end()); // count all op's input Value - for (auto op : this->ops_) { + for (auto op : ops_set) { for (auto& value : op->operands_source()) { - if (!value || !value.type()) { - continue; - } - - if (!ops_set.count(value.defining_op())) { - // if the input value owner op is not in OpSet, it's the group's input - group_inputs.insert(value); + if (!value || !value.type() || ops_set.count(value.defining_op())) continue; - } + // if the input value owner op is not in OpSet, it's the group's input + group_inputs.insert(value); } } - return group_inputs; } @@ -127,19 +122,13 @@ class OpLoweringGroup { return group_outputs; } - std::string FuncName() const { - if (fn_name_ == "") { - // TODO(Aurelius84): Polish this 
implementation. - const_cast(this)->fn_name_ = - CompatibleInfo::GroupOpsName(ops_); - } - return this->fn_name_; - } + const std::string& FuncName() const { return fn_name_; } const symbol::ShapeOrDataDimExprs& GetShapeOrDataExprs( const ::pir::Value& value) const { - CHECK(value_to_shape_or_data_exprs_.count(value)) - << "value not found in value_to_shape_or_data_exprs_"; + PADDLE_ENFORCE(HasShapeOrDataExprs(value), + ::common::errors::NotFound( + "value not found in value_to_shape_or_data_exprs_")); return value_to_shape_or_data_exprs_.at(value); } @@ -157,6 +146,8 @@ class OpLoweringGroup { } } + void OpLoweringGroup::UpdateShapeOrDataExprs(); + void WalkOps(const std::function& VisitOp) const { for (const auto& op : ops_) { VisitOp(op); @@ -198,12 +189,17 @@ class OpLoweringGroup { } std::shared_ptr mut_map_expr_ctx() { - CHECK_NOTNULL(map_expr_ctx_); + PADDLE_ENFORCE_NOT_NULL( + map_expr_ctx_, + ::common::errors::Unavailable("Required map_expr_ctx_ != nullptr.")); return map_expr_ctx_; } const adt::MapExprCtx& map_expr_ctx() const { - return *CHECK_NOTNULL(map_expr_ctx_); + PADDLE_ENFORCE_NOT_NULL( + map_expr_ctx_, + ::common::errors::Unavailable("Required map_expr_ctx_ != nullptr.")); + return *map_expr_ctx_; } void set_value_to_shape_or_data_exprs( @@ -285,6 +281,7 @@ class OpLoweringGroup { std::string group_id_{common::UniqName("group_")}; // op in this group std::vector<::pir::Operation*> ops_; + std::string fn_name_; // output ops of the group. std::unordered_set<::pir::Operation*> output_ops_; // op pattern kind. 
@@ -293,7 +290,6 @@ class OpLoweringGroup { std::vector input_names_; std::vector output_names_; std::vector<::pir::Value> output_values_; - std::string fn_name_{""}; std::map int_args_map_; alignment_schedule_info_t alignment_schedule_info_; diff --git a/paddle/cinn/hlir/framework/pir/utils.cc b/paddle/cinn/hlir/framework/pir/utils.cc index 942bf35f3f8eb..4ba4bc6d3b276 100644 --- a/paddle/cinn/hlir/framework/pir/utils.cc +++ b/paddle/cinn/hlir/framework/pir/utils.cc @@ -419,12 +419,12 @@ std::string CompatibleInfo::OpFuncName(const ::pir::Operation& op) { std::string CompatibleInfo::GroupOpsName( const std::vector<::pir::Operation*>& ops) { - std::string name = "fn"; + std::string name = "fn_"; for (auto* op : ops) { - std::string op_name = OpName(*op); - name += "_" + cinn::common::Context::Global().NewName(op_name); + name += OpName(*op); + name += "_"; } - return name; + return cinn::common::Context::Global().NewName(name); } std::string CompatibleInfo::ValueName(const ::pir::Value& value) { From e9f620eb1c64cbd565868b2ba2a5b3377c7d331c Mon Sep 17 00:00:00 2001 From: Aurelius84 Date: Mon, 8 Apr 2024 09:34:09 +0000 Subject: [PATCH 2/5] fix typo --- paddle/cinn/hlir/framework/pir/op_lowering_group.h | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/paddle/cinn/hlir/framework/pir/op_lowering_group.h b/paddle/cinn/hlir/framework/pir/op_lowering_group.h index 9639e1c9929fc..6e5e5625fa95c 100644 --- a/paddle/cinn/hlir/framework/pir/op_lowering_group.h +++ b/paddle/cinn/hlir/framework/pir/op_lowering_group.h @@ -126,9 +126,10 @@ class OpLoweringGroup { const symbol::ShapeOrDataDimExprs& GetShapeOrDataExprs( const ::pir::Value& value) const { - PADDLE_ENFORCE(HasShapeOrDataExprs(value), - ::common::errors::NotFound( - "value not found in value_to_shape_or_data_exprs_")); + PADDLE_ENFORCE_EQ(HasShapeOrDataExprs(value), + true, + ::common::errors::NotFound( + "value not found in value_to_shape_or_data_exprs_")); return 
value_to_shape_or_data_exprs_.at(value); } @@ -146,7 +147,7 @@ class OpLoweringGroup { } } - void OpLoweringGroup::UpdateShapeOrDataExprs(); + void UpdateShapeOrDataExprs(); void WalkOps(const std::function& VisitOp) const { for (const auto& op : ops_) { From cfc4167fadfe2d320d6a4d631db5e6af8b179e22 Mon Sep 17 00:00:00 2001 From: Aurelius84 Date: Mon, 8 Apr 2024 10:30:10 +0000 Subject: [PATCH 3/5] del useless file --- paddle/cinn/hlir/framework/pir/group_info.h | 40 --------------------- 1 file changed, 40 deletions(-) delete mode 100644 paddle/cinn/hlir/framework/pir/group_info.h diff --git a/paddle/cinn/hlir/framework/pir/group_info.h b/paddle/cinn/hlir/framework/pir/group_info.h deleted file mode 100644 index 5dd2a631fb9ea..0000000000000 --- a/paddle/cinn/hlir/framework/pir/group_info.h +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#pragma once -#include -#include - -namespace cinn::hlir::framework::pir { -class OpLoweringGroup; - -class OperationInfo {}; - -class GroupInfo { - public: - explicit GroupInfo(const std::shared_ptr& group); -}; - -std::ostream& operator<<(std::ostream& os, const GroupInfo& group_info); - -} // namespace cinn::hlir::framework::pir - -namespace std { -template <> -struct hash { - std::size_t operator()( - const cinn::hlir::framework::pir::GroupInfo& obj) const; -}; - -} // namespace std From f20e9a9bd4cea322d84c76222a2d536d72459ccf Mon Sep 17 00:00:00 2001 From: Aurelius84 Date: Tue, 9 Apr 2024 04:58:45 +0000 Subject: [PATCH 4/5] fix comment --- .../lower_cinn_fusion_op_pass.cc | 5 ++-- .../transforms/lowering_pass/utils.cc | 12 ++++++--- .../operator/transforms/lowering_pass/utils.h | 2 ++ .../hlir/framework/pir/op_lowering_group.cc | 26 +++++++------------ .../hlir/framework/pir/op_lowering_group.h | 4 +-- 5 files changed, 24 insertions(+), 25 deletions(-) diff --git a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc index 1952d25880802..287b4fd7fbe4d 100644 --- a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc +++ b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc @@ -34,8 +34,9 @@ pir::Operation* ProcessDyShapeGroup( const OpLoweringGroupPtr& group, pir::ShapeConstraintIRAnalysis& shape_analysis, // NOLINT pir::PatternRewriter& rewriter) { // NOLINT - // NOTE(dev): Need UpdateShapeOrDataExprs firstly. - group->UpdateShapeOrDataExprs(); + // NOTE(dev): Need UpdateShapeOrDataExprs firstly and the logic + // will be migrated into BucketLower later. 
+ UpdateGroupShapeOrDataExprs(const_cast(group)); auto group_inputs = GetBlockOutsideInput(group->ops()); GroupDimExprInfo group_dim_expr_info = GetGroupDimExprInfo(group); const auto& leaves = group_dim_expr_info.all_value_dim_exprs; diff --git a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc index c8d4f435e372b..12023e76eb198 100644 --- a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc +++ b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.cc @@ -130,10 +130,7 @@ OpLoweringGroupPtr BuildOpLoweringGroup(pir::Operation* fusion_op_ptr) { // Because the group is rebuilt, the order of group.output_values generated // by BuildCUDAJITInfo may not be same with the order bound in the yield op, // so a mapping is required. - auto& shape_analysis = - pir::ShapeAnalysisManager::Instance().Get(fusion_op->GetParentProgram()); - group->set_value_to_shape_or_data_exprs( - CreateGroupShapeOrDataExprs(group, shape_analysis)); + UpdateGroupShapeOrDataExprs(group); if (FLAGS_cinn_enable_map_expr) { cinn::adt::TryGenerateMapExprFromGroup(group); } @@ -142,4 +139,11 @@ OpLoweringGroupPtr BuildOpLoweringGroup(pir::Operation* fusion_op_ptr) { return group; } +void UpdateGroupShapeOrDataExprs(OpLoweringGroupPtr group) { + auto& shape_analysis = + pir::ShapeAnalysisManager::Instance().Get(group->GetParentProgram()); + group->set_value_to_shape_or_data_exprs( + CreateGroupShapeOrDataExprs(group, shape_analysis)); +} + } // namespace cinn::dialect::ir::details diff --git a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.h b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.h index 3b3ba4379d57c..5c5d0c104390a 100644 --- a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.h +++ b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/utils.h @@ -31,4 +31,6 @@ std::unordered_map GetJitKernelAttr( OpLoweringGroupPtr 
BuildOpLoweringGroup(pir::Operation* fusion_op_ptr); +void UpdateGroupShapeOrDataExprs(OpLoweringGroupPtr group); + } // namespace cinn::dialect::ir::details diff --git a/paddle/cinn/hlir/framework/pir/op_lowering_group.cc b/paddle/cinn/hlir/framework/pir/op_lowering_group.cc index 59cb4460bbb1c..b8622ee53db15 100644 --- a/paddle/cinn/hlir/framework/pir/op_lowering_group.cc +++ b/paddle/cinn/hlir/framework/pir/op_lowering_group.cc @@ -19,23 +19,15 @@ namespace hlir { namespace framework { namespace pir { -void OpLoweringGroup::UpdateShapeOrDataExprs() { - const auto& input_values = this->GetInputOpValues(); - if (input_values.size() == 0) return; - const auto& UpdateDimExprs = - [&](const ::pir::Value& value) -> decltype(auto) { - auto* program = value.defining_op()->GetParentProgram(); - auto& shape_analysis = ::pir::ShapeAnalysisManager::Instance().Get(program); - if (!shape_analysis.HasShapeOrDataForValue(value)) return; - VLOG(6) << "UpdateShapeOrDataExprs for value_id: " << value.impl(); - this->SetShapeOrDataExprs(value, - shape_analysis.GetShapeOrDataForValue(value)); - }; - - for (const auto& value : input_values) { - if (!value || !value.defining_op()) continue; - UpdateDimExprs(value); - } +::pir::Program* OpLoweringGroup::GetParentProgram() const { + PADDLE_ENFORCE_GT(ops_.size(), + 0, + ::common::errors::PreconditionNotMet( + "Require at least one op in the group.")); + PADDLE_ENFORCE_NOT_NULL( + ops_[0], + ::common::errors::Unavailable("Found group.ops_[0] is nullptr.")); + return ops_[0]->GetParentProgram(); } std::shared_ptr OpLoweringGroup::Clone( diff --git a/paddle/cinn/hlir/framework/pir/op_lowering_group.h b/paddle/cinn/hlir/framework/pir/op_lowering_group.h index 6e5e5625fa95c..8c54c810044cd 100644 --- a/paddle/cinn/hlir/framework/pir/op_lowering_group.h +++ b/paddle/cinn/hlir/framework/pir/op_lowering_group.h @@ -147,8 +147,6 @@ class OpLoweringGroup { } } - void UpdateShapeOrDataExprs(); - void WalkOps(const std::function& VisitOp) const { 
for (const auto& op : ops_) { VisitOp(op); @@ -203,6 +201,8 @@ return *map_expr_ctx_; } + ::pir::Program* GetParentProgram() const; + void set_value_to_shape_or_data_exprs( const std::unordered_map<::pir::Value, symbol::ShapeOrDataDimExprs>& value_to_shape_or_data_exprs) { From a3b36891229574a1a0d03a71441384ac392a5e24 Mon Sep 17 00:00:00 2001 From: Aurelius84 Date: Tue, 9 Apr 2024 05:56:52 +0000 Subject: [PATCH 5/5] fix typo --- .../transforms/lowering_pass/lower_cinn_fusion_op_pass.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc index 287b4fd7fbe4d..3fa26f51b5592 100644 --- a/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc +++ b/paddle/cinn/hlir/dialect/operator/transforms/lowering_pass/lower_cinn_fusion_op_pass.cc @@ -36,7 +36,7 @@ pir::Operation* ProcessDyShapeGroup( pir::PatternRewriter& rewriter) { // NOLINT // NOTE(dev): Need UpdateShapeOrDataExprs firstly and the logic // will be migrated into BucketLower later. - UpdateGroupShapeOrDataExprs(const_cast(group)); + UpdateGroupShapeOrDataExprs(const_cast(group)); auto group_inputs = GetBlockOutsideInput(group->ops()); GroupDimExprInfo group_dim_expr_info = GetGroupDimExprInfo(group); const auto& leaves = group_dim_expr_info.all_value_dim_exprs;