Skip to content

Commit

Permalink
[KYUUBI apache#5055] [Authz] Support building function privileges in …
Browse files Browse the repository at this point in the history
…Spark 3.4

### _Why are the changes needed?_

Add support for building function privileges in Spark 3.4, as a follow-up to apache#4167

### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible

- [ ] Add screenshots for manual tests if appropriate

- [x] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request

Closes apache#5055 from packyan/PR_4167_follow_up_support_spark_3.4.

Closes apache#5055

46fe89e [Deng An] add support for function privileges building in 3.4

Authored-by: Deng An <packyande@gmail.com>
Signed-off-by: liangbowen <liangbowen@gf.com.cn>
  • Loading branch information
packyan authored and bowenliang123 committed Aug 8, 2023
1 parent 3571634 commit 0a04f08
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 29 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -220,15 +220,25 @@ object PrivilegesBuilder {
plan: LogicalPlan,
spark: SparkSession): PrivilegesAndOpType = {
val inputObjs = new ArrayBuffer[PrivilegeObject]
// TODO: add support for Spark 3.4.x
plan transformAllExpressions {
case hiveFunction: Expression if isKnownFunction(hiveFunction) =>
val functionSpec: ScanSpec = getFunctionSpec(hiveFunction)
if (functionSpec.functionDescs.exists(!_.functionTypeDesc.get.skip(hiveFunction, spark))) {
functionSpec.functions(hiveFunction).foreach(func =>
inputObjs += PrivilegeObject(func))
plan match {
case command: Command if isKnownTableCommand(command) =>
val spec = getTableCommandSpec(command)
val functionPrivAndOpType = spec.queries(plan)
.map(plan => buildFunctions(plan, spark))
functionPrivAndOpType.map(_._1)
.reduce(_ ++ _)
.foreach(functionPriv => inputObjs += functionPriv)

case plan => plan transformAllExpressions {
case hiveFunction: Expression if isKnownFunction(hiveFunction) =>
val functionSpec: ScanSpec = getFunctionSpec(hiveFunction)
if (functionSpec.functionDescs
.exists(!_.functionTypeDesc.get.skip(hiveFunction, spark))) {
functionSpec.functions(hiveFunction).foreach(func =>
inputObjs += PrivilegeObject(func))
}
hiveFunction
}
hiveFunction
}
(inputObjs, Seq.empty, OperationType.QUERY)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ package org.apache.kyuubi.plugin.spark.authz.serde
* :: Developer API ::
*
* Represents a function identity
*
* @param catalog
* @param database
* @param functionName
*/
// Final form of the identity: an optional catalog, an optional database,
// and the bare function name. Catalog is None for session-catalog functions.
case class Function(catalog: Option[String], database: Option[String], functionName: String)
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ package org.apache.kyuubi.plugin.spark.authz.serde
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.expressions.ExpressionInfo

import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionIdentFromQualifiedName
import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionFromQualifiedName

trait FunctionExtractor extends (AnyRef => Function) with Extractor

Expand All @@ -29,13 +29,16 @@ object FunctionExtractor {
loadExtractorsToMap[FunctionExtractor]
}

/**
 * Parses a qualified function name into a [[Function]].
 *
 * Splits on every dot: "catalog.db.func" yields three parts,
 * "db.func" two, and a bare "func" one. Missing qualifiers map to None.
 *
 * @param qualifiedName the possibly-qualified function name
 * @return a [[Function]] with catalog/database filled in when present
 */
private[authz] def buildFunctionFromQualifiedName(qualifiedName: String): Function = {
  val parts: Array[String] = qualifiedName.split("\\.")
  val (catalog, database, functionName) = if (parts.length == 3) {
    (Some(parts.head), Some(parts.tail.head), parts.last)
  } else if (parts.length == 2) {
    (None, Some(parts.head), parts.last)
  } else {
    // Unqualified (or unexpectedly shaped) name: keep the whole string verbatim.
    (None, None, qualifiedName)
  }
  Function(catalog, database, functionName)
}
}

Expand All @@ -44,7 +47,7 @@ object FunctionExtractor {
*/
class StringFunctionExtractor extends FunctionExtractor {
  override def apply(v1: AnyRef): Function = {
    // v1 is a bare, unqualified function name: no catalog, no database.
    Function(None, None, v1.asInstanceOf[String])
  }
}

Expand All @@ -54,8 +57,7 @@ class StringFunctionExtractor extends FunctionExtractor {
class QualifiedNameStringFunctionExtractor extends FunctionExtractor {
  override def apply(v1: AnyRef): Function = {
    // Delegate parsing of "catalog.db.func" / "db.func" / "func" forms
    // to the shared helper so all call sites split names identically.
    val qualifiedName: String = v1.asInstanceOf[String]
    buildFunctionFromQualifiedName(qualifiedName)
  }
}

Expand All @@ -65,7 +67,7 @@ class QualifiedNameStringFunctionExtractor extends FunctionExtractor {
class FunctionIdentifierFunctionExtractor extends FunctionExtractor {
  override def apply(v1: AnyRef): Function = {
    val identifier = v1.asInstanceOf[FunctionIdentifier]
    // FunctionIdentifier carries only database + name, so catalog is None here.
    Function(None, identifier.database, identifier.funcName)
  }
}

Expand All @@ -75,6 +77,6 @@ class FunctionIdentifierFunctionExtractor extends FunctionExtractor {
class ExpressionInfoFunctionExtractor extends FunctionExtractor {
  override def apply(v1: AnyRef): Function = {
    val info = v1.asInstanceOf[ExpressionInfo]
    // ExpressionInfo.getDb may be null (built-in functions); Option(...) maps null -> None.
    // ExpressionInfo exposes no catalog, so catalog is None.
    Function(None, Option(info.getDb), info.getName)
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.catalog.SessionCatalog

import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionIdentFromQualifiedName
import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionFromQualifiedName
import org.apache.kyuubi.plugin.spark.authz.serde.FunctionType.{FunctionType, PERMANENT, SYSTEM, TEMP}
import org.apache.kyuubi.plugin.spark.authz.serde.FunctionTypeExtractor.getFunctionType

Expand Down Expand Up @@ -93,7 +93,7 @@ class FunctionNameFunctionTypeExtractor extends FunctionTypeExtractor {
override def apply(v1: AnyRef, spark: SparkSession): FunctionType = {
  val catalog: SessionCatalog = spark.sessionState.catalog
  val qualifiedName: String = v1.asInstanceOf[String]
  // Parse catalog/database/function parts, then classify the function
  // (e.g. TEMP / PERMANENT / SYSTEM) against the session catalog.
  val function = buildFunctionFromQualifiedName(qualifiedName)
  // NOTE(review): FunctionIdentifier takes only (funcName, database), so any
  // parsed catalog part is dropped here — confirm this is intended for v2 catalogs.
  getFunctionType(FunctionIdentifier(function.functionName, function.database), catalog)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import org.scalatest.funsuite.AnyFunSuite

import org.apache.kyuubi.plugin.spark.authz.OperationType.QUERY
import org.apache.kyuubi.plugin.spark.authz.ranger.AccessType
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.SPARK_RUNTIME_VERSION

abstract class FunctionPrivilegesBuilderSuite extends AnyFunSuite
with SparkSessionProvider with BeforeAndAfterAll with BeforeAndAfterEach {
Expand Down Expand Up @@ -112,7 +111,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
override protected val catalogImpl: String = "hive"

test("Function Call Query") {
assume(SPARK_RUNTIME_VERSION <= "3.3")
val plan = sql(s"SELECT kyuubi_fun_1('data'), " +
s"kyuubi_fun_2(value), " +
s"${reusedDb}.kyuubi_fun_0(value), " +
Expand All @@ -132,7 +130,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
}

test("Function Call Query with Quoted Name") {
assume(SPARK_RUNTIME_VERSION <= "3.3")
val plan = sql(s"SELECT `kyuubi_fun_1`('data'), " +
s"`kyuubi_fun_2`(value), " +
s"`${reusedDb}`.`kyuubi_fun_0`(value), " +
Expand All @@ -152,7 +149,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
}

test("Simple Function Call Query") {
assume(SPARK_RUNTIME_VERSION <= "3.3")
val plan = sql(s"SELECT kyuubi_fun_1('data'), " +
s"kyuubi_fun_0('value'), " +
s"${reusedDb}.kyuubi_fun_0('value'), " +
Expand All @@ -172,7 +168,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
}

test("Function Call In CAST Command") {
assume(SPARK_RUNTIME_VERSION <= "3.3")
val table = "castTable"
withTable(table) { table =>
val plan = sql(s"CREATE TABLE ${table} " +
Expand Down

0 comments on commit 0a04f08

Please sign in to comment.