Skip to content

Commit 1bf71a8

Browse files
LennonChin authored and bowenliang123 committed
[KYUUBI #7202] [AUTHZ] make FilteredShowObjectsExec extends V2CommandExec to avoid running executor side task
Currently, FilteredShowObjectsExec is not a subclass of V2CommandExec; when executed, it invokes the `def doExecute(): RDD[InternalRow]` method to submit an executor-side task, which is slow and wastes resources. V2CommandExec implements `def executeCollect(): Array[InternalRow]` to avoid running `def doExecute(): RDD[InternalRow]`. ### Why are the changes needed? Improve performance. ### How was this patch tested? Existing unit tests. ### Was this patch authored or co-authored using generative AI tooling? No. Closes #7202 from LennonChin/authz_filtered_show_objects. Closes #7202 10b55e6 [Lennon Chin] [AUTHZ] make FilteredShowObjectsExec extends V2CommandExec to avoid running executor side task Authored-by: Lennon Chin <i@coderap.com> Signed-off-by: Bowen Liang <liangbowen@gf.com.cn>
1 parent fc04a3a commit 1bf71a8

File tree

1 file changed

+10
-10
lines changed

1 file changed

+10
-10
lines changed

extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilteredShowObjectsExec.scala

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -18,31 +18,28 @@ package org.apache.kyuubi.plugin.spark.authz.rule.rowfilter
1818

1919
import org.apache.hadoop.security.UserGroupInformation
2020
import org.apache.spark.SparkContext
21-
import org.apache.spark.rdd.RDD
2221
import org.apache.spark.sql.catalyst.InternalRow
2322
import org.apache.spark.sql.catalyst.expressions.Attribute
2423
import org.apache.spark.sql.execution.{LeafExecNode, SparkPlan}
24+
import org.apache.spark.sql.execution.datasources.v2.V2CommandExec
2525

2626
import org.apache.kyuubi.plugin.spark.authz.{ObjectType, OperationType}
2727
import org.apache.kyuubi.plugin.spark.authz.ranger.{AccessRequest, AccessResource, AccessType, SparkRangerAdminPlugin}
2828
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils
2929

30-
trait FilteredShowObjectsExec extends LeafExecNode {
30+
trait FilteredShowObjectsExec extends V2CommandExec with LeafExecNode {
3131
def result: Array[InternalRow]
32-
33-
override def output: Seq[Attribute]
34-
35-
final override def doExecute(): RDD[InternalRow] = {
36-
sparkContext.parallelize(result, 1)
37-
}
3832
}
3933

4034
trait FilteredShowObjectsCheck {
4135
def isAllowed(r: InternalRow, ugi: UserGroupInformation): Boolean
4236
}
4337

4438
case class FilteredShowNamespaceExec(result: Array[InternalRow], output: Seq[Attribute])
45-
extends FilteredShowObjectsExec {}
39+
extends FilteredShowObjectsExec {
40+
override protected def run(): Seq[InternalRow] = result
41+
}
42+
4643
object FilteredShowNamespaceExec extends FilteredShowObjectsCheck {
4744
def apply(delegated: SparkPlan, sc: SparkContext): FilteredShowNamespaceExec = {
4845
val result = delegated.executeCollect()
@@ -60,7 +57,10 @@ object FilteredShowNamespaceExec extends FilteredShowObjectsCheck {
6057
}
6158

6259
case class FilteredShowTablesExec(result: Array[InternalRow], output: Seq[Attribute])
63-
extends FilteredShowObjectsExec {}
60+
extends FilteredShowObjectsExec {
61+
override protected def run(): Seq[InternalRow] = result
62+
}
63+
6464
object FilteredShowTablesExec extends FilteredShowObjectsCheck {
6565
def apply(delegated: SparkPlan, sc: SparkContext): FilteredShowNamespaceExec = {
6666
val result = delegated.executeCollect()

0 commit comments

Comments
 (0)