This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 2ed0990b7 [KYUUBI #4676] [AUTHZ] Reuse users and namespaces in both 
tests and policy file generation
2ed0990b7 is described below

commit 2ed0990b7304ca5986ab116e02f7508e3d732549
Author: liangbowen <[email protected]>
AuthorDate: Wed Apr 19 18:06:06 2023 +0800

    [KYUUBI #4676] [AUTHZ] Reuse users and namespaces in both tests and policy 
file generation
    
    ### _Why are the changes needed?_
    
    - align the same list of users and namespaces used in tests and in policy 
file generation, as users and namespaces are the most important elements of 
Ranger policy's conditions and resources.
    - help to improve and simplify decision-making in Authz testing and give a 
clear view of what exactly is tested and authorized, while making it handy and 
easy to see usage links in the IDE
    - reduce possible abuse and untraceable uses of authorized and unauthorized 
users, rules, resources. (We have up to 4 unauthorized users in separate 
tests!)
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run 
test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests)
 locally before making a pull request
    
    Closes #4676 from bowenliang123/authz-gen-common.
    
    Closes #4676
    
    dc535a4d8 [liangbowen] authz-gen-common
    
    Authored-by: liangbowen <[email protected]>
    Signed-off-by: liangbowen <[email protected]>
---
 .../spark/authz/gen/PolicyJsonFileGenerator.scala  |  20 +-
 .../plugin/spark/authz/gen/RangerGenWrapper.scala  |   2 +-
 .../spark/authz/PrivilegesBuilderSuite.scala       |  40 ++--
 .../plugin/spark/authz/RangerTestResources.scala   |  45 ++++
 .../plugin/spark/authz/SparkSessionProvider.scala  |  12 +-
 .../spark/authz/V2CommandsPrivilegesSuite.scala    |   9 +-
 .../IcebergCatalogRangerSparkExtensionSuite.scala  |  46 ++--
 .../authz/ranger/RangerSparkExtensionSuite.scala   | 260 +++++++++++----------
 .../authz/ranger/SparkRangerAdminPluginSuite.scala |  10 +-
 ...JdbcTableCatalogRangerSparkExtensionSuite.scala |  60 ++---
 .../ranger/datamasking/DataMaskingTestBase.scala   |  75 +++---
 .../ranger/rowfiltering/RowFilteringTestBase.scala |  37 +--
 12 files changed, 329 insertions(+), 287 deletions(-)

diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
index 8dbc802b8..e53d77197 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/PolicyJsonFileGenerator.scala
@@ -31,7 +31,9 @@ import org.apache.ranger.plugin.model.RangerPolicy
 // scalastyle:off
 import org.scalatest.funsuite.AnyFunSuite
 
-import org.apache.kyuubi.plugin.spark.authz.gen.KRangerPolicyItemAccess._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
+import 
org.apache.kyuubi.plugin.spark.authz.gen.KRangerPolicyItemAccess.allowTypes
 import org.apache.kyuubi.plugin.spark.authz.gen.KRangerPolicyResource._
 import org.apache.kyuubi.plugin.spark.authz.gen.RangerAccessType._
 import org.apache.kyuubi.plugin.spark.authz.gen.RangerClassConversions._
@@ -127,22 +129,6 @@ class PolicyJsonFileGenerator extends AnyFunSuite {
       }
   }
 
-  // users
-  private val admin = "admin"
-  private val bob = "bob"
-  private val kent = "kent"
-  private val permViewUser = "perm_view_user"
-  private val ownerPlaceHolder = "{OWNER}"
-  private val createOnlyUser = "create_only_user"
-  private val defaultTableOwner = "default_table_owner"
-  private val permViewOnlyUser = "user_perm_view_only"
-
-  // db
-  private val defaultDb = "default"
-  private val sparkCatalog = "spark_catalog"
-  private val icebergNamespace = "iceberg_ns"
-  private val namespace1 = "ns1"
-
   // resources
   private val allDatabaseRes = databaseRes("*")
   private val allTableRes = tableRes("*")
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/RangerGenWrapper.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/RangerGenWrapper.scala
index 14405f816..71bce3759 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/RangerGenWrapper.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/gen/scala/org/apache/kyuubi/plugin/spark/authz/gen/RangerGenWrapper.scala
@@ -22,7 +22,7 @@ import scala.language.implicitConversions
 import org.apache.ranger.plugin.model.RangerPolicy
 import org.apache.ranger.plugin.model.RangerPolicy._
 
-import 
org.apache.kyuubi.plugin.spark.authz.gen.RangerClassConversions.getRangerObject
+import org.apache.kyuubi.plugin.spark.authz.gen.RangerClassConversions._
 
 trait RangerObjectGenerator[T] {
   def get: T
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index e9483eb34..af4a7c262 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -30,6 +30,8 @@ import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 import org.scalatest.funsuite.AnyFunSuite
 
 import org.apache.kyuubi.plugin.spark.authz.OperationType._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.ranger.AccessType
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils
 import 
org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.isSparkVersionAtMost
@@ -122,8 +124,8 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.DATABASE)
     assert(po.catalog.isEmpty)
-    assert(po.dbname === "default")
-    assert(po.objectName === "default")
+    assert(po.dbname === defaultDb)
+    assert(po.objectName === defaultDb)
     assert(po.columns.isEmpty)
   }
 
@@ -365,7 +367,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
     assert(po.catalog.isEmpty)
-    assert(po.dbname === (if (isSparkV2) null else "default"))
+    assert(po.dbname === (if (isSparkV2) null else defaultDb))
     assert(po.objectName === "AlterViewAsCommand")
     checkTableOwner(po)
     assert(po.columns.isEmpty)
@@ -521,7 +523,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
     assert(po.catalog.isEmpty)
-    assert(po.dbname === (if (isSparkV2) null else "default"))
+    assert(po.dbname === (if (isSparkV2) null else defaultDb))
     assert(po.objectName === "CreateViewCommand")
     assert(po.columns.isEmpty)
     val accessType = ranger.AccessType(po, operationType, isInput = false)
@@ -541,7 +543,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
       assert(po.actionType === PrivilegeObjectActionType.OTHER)
       assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
       assert(po.catalog.isEmpty)
-      assert(po.dbname === (if (isSparkV2) null else "default"))
+      assert(po.dbname === (if (isSparkV2) null else defaultDb))
       assert(po.objectName === tableName)
       assert(po.columns.isEmpty)
       val accessType = ranger.AccessType(po, operationType, isInput = false)
@@ -588,7 +590,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.FUNCTION)
     assert(po.catalog.isEmpty)
-    val db = if (isSparkV33OrGreater) "default" else null
+    val db = if (isSparkV33OrGreater) defaultDb else null
     assert(po.dbname === db)
     assert(po.objectName === "CreateFunctionCommand")
     assert(po.columns.isEmpty)
@@ -620,7 +622,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.FUNCTION)
     assert(po.catalog.isEmpty)
-    val db = if (isSparkV33OrGreater) "default" else null
+    val db = if (isSparkV33OrGreater) defaultDb else null
     assert(po.dbname === db)
     assert(po.objectName === "DropFunctionCommand")
     assert(po.columns.isEmpty)
@@ -641,7 +643,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.FUNCTION)
     assert(po.catalog.isEmpty)
-    val db = if (isSparkV33OrGreater) "default" else null
+    val db = if (isSparkV33OrGreater) defaultDb else null
     assert(po.dbname === db)
     assert(po.objectName === "RefreshFunctionCommand")
     assert(po.columns.isEmpty)
@@ -1267,8 +1269,8 @@ class InMemoryPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.DATABASE)
     assert(po.catalog.isEmpty)
-    assert(po.dbname === "default")
-    assert(po.objectName === "default")
+    assert(po.dbname === defaultDb)
+    assert(po.objectName === defaultDb)
     assert(po.columns.isEmpty)
     val accessType = ranger.AccessType(po, operationType, isInput = false)
     assert(accessType === AccessType.ALTER)
@@ -1296,7 +1298,7 @@ class InMemoryPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
     assert(po.catalog.isEmpty)
-    assert(po.dbname === (if (isSparkV2) null else "default"))
+    assert(po.dbname === (if (isSparkV2) null else defaultDb))
     assert(po.objectName === "CreateDataSourceTableAsSelectCommand")
     if (catalogImpl == "hive") {
       assert(po.columns === Seq("key", "value"))
@@ -1328,7 +1330,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
       assert(po.actionType === PrivilegeObjectActionType.OTHER)
       assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
       assert(po.catalog.isEmpty)
-      assert(po.dbname === "default")
+      assert(po.dbname === defaultDb)
       assert(po.objectName === t)
       assert(po.columns.head === "pid")
       checkTableOwner(po)
@@ -1350,7 +1352,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
       assert(po.actionType === PrivilegeObjectActionType.OTHER)
       assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
       assert(po.catalog.isEmpty)
-      assert(po.dbname === "default")
+      assert(po.dbname === defaultDb)
       assert(po.objectName === "CreateTableCommand")
       assert(po.columns.isEmpty)
       val accessType = ranger.AccessType(po, operationType, isInput = false)
@@ -1382,7 +1384,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
     assert(po.catalog.isEmpty)
-    assert(po.dbname === "default")
+    assert(po.dbname === defaultDb)
     assert(po.objectName === "CreateHiveTableAsSelectCommand")
     assert(po.columns === Seq("key", "value"))
     val accessType = ranger.AccessType(po, operationType, isInput = false)
@@ -1493,7 +1495,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
         assert(po.actionType === PrivilegeObjectActionType.INSERT)
         assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
         assert(po.catalog.isEmpty)
-        assert(po.dbname equalsIgnoreCase "default")
+        assert(po.dbname equalsIgnoreCase defaultDb)
         assert(po.objectName equalsIgnoreCase tableName)
         assert(po.columns.isEmpty)
         checkTableOwner(po)
@@ -1536,7 +1538,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
         assert(po.actionType === PrivilegeObjectActionType.INSERT)
         assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
         assert(po.catalog.isEmpty)
-        assert(po.dbname equalsIgnoreCase "default")
+        assert(po.dbname equalsIgnoreCase defaultDb)
         assert(po.objectName equalsIgnoreCase tableName)
         assert(po.columns === Seq("a", "b"))
         checkTableOwner(po)
@@ -1618,7 +1620,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
         assert(po.actionType === PrivilegeObjectActionType.INSERT)
         assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
         assert(po.catalog.isEmpty)
-        assert(po.dbname equalsIgnoreCase "default")
+        assert(po.dbname equalsIgnoreCase defaultDb)
         assert(po.objectName equalsIgnoreCase tableName)
         assert(po.columns === Seq("a", "b"))
         checkTableOwner(po)
@@ -1639,7 +1641,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
       val po0 = in.head
       assert(po0.actionType === PrivilegeObjectActionType.OTHER)
       assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
-      assert(po0.dbname === "default")
+      assert(po0.dbname === defaultDb)
       assert(po0.objectName === t)
       assert(po0.columns.isEmpty)
       checkTableOwner(po0)
@@ -1665,7 +1667,7 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
     assert(po.catalog.isEmpty)
-    assert(po.dbname === "default")
+    assert(po.dbname === defaultDb)
     assert(po.objectName === "OptimizedCreateHiveTableAsSelectCommand")
     assert(po.columns === Seq("a"))
     val accessType = ranger.AccessType(po, operationType, isInput = false)
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala
new file mode 100644
index 000000000..2297f73f9
--- /dev/null
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/RangerTestResources.scala
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.plugin.spark.authz
+
+object RangerTestUsers {
+  // authorized users used in policy generation
+  val admin = "admin"
+  val alice = "alice"
+  val bob = "bob"
+  val kent = "kent"
+  val permViewUser = "perm_view_user"
+  val ownerPlaceHolder = "{OWNER}"
+  val createOnlyUser = "create_only_user"
+  val defaultTableOwner = "default_table_owner"
+  val permViewOnlyUser = "user_perm_view_only"
+
+  // non-authorized users
+  val invisibleUser = "i_am_invisible"
+  val denyUser = "denyuser"
+  val denyUser2 = "denyuser2"
+  val someone = "someone"
+}
+
+object RangerTestNamespace {
+  val defaultDb = "default"
+  val sparkCatalog = "spark_catalog"
+  val icebergNamespace = "iceberg_ns"
+  val namespace1 = "ns1"
+  val namespace2 = "ns2"
+}
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index ce8d6bc0c..6b1087930 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession, 
SparkSessionExtension
 import org.scalatest.Assertions.convertToEqualizer
 
 import org.apache.kyuubi.Utils
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 
 trait SparkSessionProvider {
@@ -39,7 +40,6 @@ trait SparkSessionProvider {
   protected val extension: SparkSessionExtensions => Unit = _ => Unit
   protected val sqlExtensions: String = ""
 
-  protected val defaultTableOwner = "default_table_owner"
   protected val extraSparkConf: SparkConf = new SparkConf()
 
   protected lazy val spark: SparkSession = {
@@ -83,12 +83,12 @@ trait SparkSessionProvider {
       f
     } finally {
       res.foreach {
-        case (t, "table") => doAs("admin", sql(s"DROP TABLE IF EXISTS $t"))
-        case (db, "database") => doAs("admin", sql(s"DROP DATABASE IF EXISTS 
$db"))
-        case (fn, "function") => doAs("admin", sql(s"DROP FUNCTION IF EXISTS 
$fn"))
-        case (view, "view") => doAs("admin", sql(s"DROP VIEW IF EXISTS $view"))
+        case (t, "table") => doAs(admin, sql(s"DROP TABLE IF EXISTS $t"))
+        case (db, "database") => doAs(admin, sql(s"DROP DATABASE IF EXISTS 
$db"))
+        case (fn, "function") => doAs(admin, sql(s"DROP FUNCTION IF EXISTS 
$fn"))
+        case (view, "view") => doAs(admin, sql(s"DROP VIEW IF EXISTS $view"))
         case (cacheTable, "cache") => if (isSparkV32OrGreater) {
-            doAs("admin", sql(s"UNCACHE TABLE IF EXISTS $cacheTable"))
+            doAs(admin, sql(s"UNCACHE TABLE IF EXISTS $cacheTable"))
           }
         case (_, e) =>
           throw new RuntimeException(s"the resource whose resource type is $e 
cannot be cleared")
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
index dede81426..0ad6b3fea 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
@@ -23,6 +23,7 @@ import org.apache.hadoop.security.UserGroupInformation
 import org.apache.spark.sql.execution.QueryExecution
 
 import org.apache.kyuubi.plugin.spark.authz.OperationType._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
 import org.apache.kyuubi.plugin.spark.authz.ranger.AccessType
 import org.apache.kyuubi.plugin.spark.authz.serde.{Database, DB_COMMAND_SPECS}
 
@@ -688,8 +689,8 @@ abstract class V2CommandsPrivilegesSuite extends 
PrivilegesBuilderSuite {
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.DATABASE)
     assert(po.catalog.get === sparkSessionCatalogName)
-    assert(po.dbname === "default")
-    assert(po.objectName === "default")
+    assert(po.dbname === defaultDb)
+    assert(po.objectName === defaultDb)
     assert(po.columns.isEmpty)
   }
 
@@ -732,8 +733,8 @@ abstract class V2CommandsPrivilegesSuite extends 
PrivilegesBuilderSuite {
     assert(po.actionType === PrivilegeObjectActionType.OTHER)
     assert(po.privilegeObjectType === PrivilegeObjectType.DATABASE)
     assert(po.catalog.get === sparkSessionCatalogName)
-    assert(po.dbname === "default")
-    assert(po.objectName === "default")
+    assert(po.dbname === defaultDb)
+    assert(po.objectName === defaultDb)
     assert(po.columns.isEmpty)
     val accessType = ranger.AccessType(po, operationType, isInput = false)
     assert(accessType === AccessType.ALTER)
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
index 6b1cedf78..ba6992362 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
@@ -23,6 +23,8 @@ import org.scalatest.Outcome
 
 import org.apache.kyuubi.Utils
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException
+import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 
 /**
  * Tests for RangerSparkExtensionSuite
@@ -36,7 +38,7 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
     else ""
 
   val catalogV2 = "local"
-  val namespace1 = "iceberg_ns"
+  val namespace1 = icebergNamespace
   val table1 = "table1"
   val outputTable1 = "outputTable1"
 
@@ -57,18 +59,18 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
 
       super.beforeAll()
 
-      doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS 
$catalogV2.$namespace1"))
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $catalogV2.$namespace1"))
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table1" +
           " (id int, name string, city string) USING iceberg"))
 
       doAs(
-        "admin",
+        admin,
         sql(s"INSERT INTO $catalogV2.$namespace1.$table1" +
           " (id , name , city ) VALUES (1, 'liangbowen','Guangzhou')"))
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$outputTable1" 
+
           " (id int, name string, city string) USING iceberg"))
     }
@@ -93,7 +95,7 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
     // MergeIntoTable:  Using a MERGE INTO Statement
     val e1 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(mergeIntoSql)))
     assert(e1.getMessage.contains(s"does not have [select] privilege" +
       s" on [$namespace1/$table1/id]"))
@@ -104,7 +106,7 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
         true)
       val e2 = intercept[AccessControlException](
         doAs(
-          "someone",
+          someone,
           sql(mergeIntoSql)))
       assert(e2.getMessage.contains(s"does not have" +
         s" [select] privilege" +
@@ -116,21 +118,21 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
         false)
     }
 
-    doAs("admin", sql(mergeIntoSql))
+    doAs(admin, sql(mergeIntoSql))
   }
 
   test("[KYUUBI #3515] UPDATE TABLE") {
     // UpdateTable
     val e1 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(s"UPDATE $catalogV2.$namespace1.$table1 SET city='Guangzhou' " +
           " WHERE id=1")))
     assert(e1.getMessage.contains(s"does not have [update] privilege" +
       s" on [$namespace1/$table1]"))
 
     doAs(
-      "admin",
+      admin,
       sql(s"UPDATE $catalogV2.$namespace1.$table1 SET city='Guangzhou' " +
         " WHERE id=1"))
   }
@@ -138,11 +140,11 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
   test("[KYUUBI #3515] DELETE FROM TABLE") {
     // DeleteFromTable
     val e6 = intercept[AccessControlException](
-      doAs("someone", sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE 
id=2")))
+      doAs(someone, sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE 
id=2")))
     assert(e6.getMessage.contains(s"does not have [update] privilege" +
       s" on [$namespace1/$table1]"))
 
-    doAs("admin", sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE 
id=2"))
+    doAs(admin, sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE id=2"))
   }
 
   test("[KYUUBI #3666] Support {OWNER} variable for queries run on CatalogV2") 
{
@@ -163,7 +165,7 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
         }.isSuccess))
 
       doAs(
-        "create_only_user", {
+        createOnlyUser, {
           val e = intercept[AccessControlException](sql(select).collect())
           assert(e.getMessage === errorMessage("select", 
s"$namespace1/$table/key"))
         })
@@ -178,17 +180,17 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
       (s"$catalogV2.default.src", "table"),
       (s"$catalogV2.default.outputTable2", "table"))) {
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.default.src" +
           " (id int, name string, key string) USING iceberg"))
       doAs(
-        "admin",
+        admin,
         sql(s"INSERT INTO $catalogV2.default.src" +
           " (id , name , key ) VALUES " +
           "(1, 'liangbowen1','10')" +
           ", (2, 'liangbowen2','20')"))
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$outputTable2" 
+
           " (id int, name string, key string) USING iceberg"))
 
@@ -200,20 +202,20 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
            |WHEN NOT MATCHED THEN INSERT (id, name, key) VALUES (source.id, 
source.name, source.key)
         """.stripMargin
 
-      doAs("admin", sql(mergeIntoSql))
+      doAs(admin, sql(mergeIntoSql))
       doAs(
-        "admin", {
+        admin, {
           val countOutputTable =
             sql(s"select count(1) from 
$catalogV2.$namespace1.$outputTable2").collect()
           val rowCount = countOutputTable(0).get(0)
           assert(rowCount === 2)
         })
-      doAs("admin", sql(s"truncate table 
$catalogV2.$namespace1.$outputTable2"))
+      doAs(admin, sql(s"truncate table $catalogV2.$namespace1.$outputTable2"))
 
       // source table with row filter `key`<20
-      doAs("bob", sql(mergeIntoSql))
+      doAs(bob, sql(mergeIntoSql))
       doAs(
-        "admin", {
+        admin, {
           val countOutputTable =
             sql(s"select count(1) from 
$catalogV2.$namespace1.$outputTable2").collect()
           val rowCount = countOutputTable(0).get(0)
@@ -224,7 +226,7 @@ class IcebergCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite
 
   test("[KYUUBI #4255] DESCRIBE TABLE") {
     val e1 = intercept[AccessControlException](
-      doAs("someone", sql(s"DESCRIBE TABLE 
$catalogV2.$namespace1.$table1").explain()))
+      doAs(someone, sql(s"DESCRIBE TABLE 
$catalogV2.$namespace1.$table1").explain()))
     assert(e1.getMessage.contains(s"does not have [select] privilege" +
       s" on [$namespace1/$table1]"))
   }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index beef36d5d..6424832ea 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -31,6 +31,8 @@ import org.scalatest.BeforeAndAfterAll
 import org.scalatest.funsuite.AnyFunSuite
 
 import org.apache.kyuubi.plugin.spark.authz.{AccessControlException, 
SparkSessionProvider}
+import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import 
org.apache.kyuubi.plugin.spark.authz.ranger.RuleAuthorization.KYUUBI_AUTHZ_TAG
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.getFieldVal
 
@@ -88,7 +90,7 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
   }
 
   test("[KYUUBI #3226] RuleAuthorization: Should check privileges once only.") 
{
-    val logicalPlan = doAs("admin", sql("SHOW TABLES").queryExecution.logical)
+    val logicalPlan = doAs(admin, sql("SHOW TABLES").queryExecution.logical)
     val rule = new RuleAuthorization(spark)
 
     (1 until 10).foreach { i =>
@@ -116,7 +118,7 @@ abstract class RangerSparkExtensionSuite extends AnyFunSuite
     withCleanTmpResources(Seq((testTable, "table"))) {
       // create tmp table
       doAs(
-        "admin", {
+        admin, {
           sql(create)
 
           // session1: first query, should auth once.[LogicalRelation]
@@ -155,18 +157,18 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
     val e = intercept[AccessControlException](sql(create))
     assert(e.getMessage === errorMessage("create", "mydb"))
     withCleanTmpResources(Seq((testDb, "database"))) {
-      doAs("admin", assert(Try { sql(create) }.isSuccess))
-      doAs("admin", assert(Try { sql(alter) }.isSuccess))
+      doAs(admin, assert(Try { sql(create) }.isSuccess))
+      doAs(admin, assert(Try { sql(alter) }.isSuccess))
       val e1 = intercept[AccessControlException](sql(alter))
       assert(e1.getMessage === errorMessage("alter", "mydb"))
       val e2 = intercept[AccessControlException](sql(drop))
       assert(e2.getMessage === errorMessage("drop", "mydb"))
-      doAs("kent", Try(sql("SHOW DATABASES")).isSuccess)
+      doAs(kent, Try(sql("SHOW DATABASES")).isSuccess)
     }
   }
 
   test("auth: tables") {
-    val db = "default"
+    val db = defaultDb
     val table = "src"
     val col = "key"
 
@@ -178,14 +180,14 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
     assert(e.getMessage === errorMessage("create"))
 
     withCleanTmpResources(Seq((s"$db.$table", "table"))) {
-      doAs("bob", assert(Try { sql(create0) }.isSuccess))
-      doAs("bob", assert(Try { sql(alter0) }.isSuccess))
+      doAs(bob, assert(Try { sql(create0) }.isSuccess))
+      doAs(bob, assert(Try { sql(alter0) }.isSuccess))
 
       val e1 = intercept[AccessControlException](sql(drop0))
       assert(e1.getMessage === errorMessage("drop"))
-      doAs("bob", assert(Try { sql(alter0) }.isSuccess))
-      doAs("bob", assert(Try { sql(select).collect() }.isSuccess))
-      doAs("kent", assert(Try { sql(s"SELECT key FROM $db.$table").collect() 
}.isSuccess))
+      doAs(bob, assert(Try { sql(alter0) }.isSuccess))
+      doAs(bob, assert(Try { sql(select).collect() }.isSuccess))
+      doAs(kent, assert(Try { sql(s"SELECT key FROM $db.$table").collect() 
}.isSuccess))
 
       Seq(
         select,
@@ -196,10 +198,10 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
         s"SELECT key FROM $db.$table WHERE value in (SELECT value as key FROM 
$db.$table)")
         .foreach { q =>
           doAs(
-            "kent", {
+            kent, {
               withClue(q) {
                 val e = intercept[AccessControlException](sql(q).collect())
-                assert(e.getMessage === errorMessage("select", 
"default/src/value", "kent"))
+                assert(e.getMessage === errorMessage("select", 
"default/src/value", kent))
               }
             })
         }
@@ -207,15 +209,15 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
   }
 
   test("auth: functions") {
-    val db = "default"
+    val db = defaultDb
     val func = "func"
     val create0 = s"CREATE FUNCTION IF NOT EXISTS $db.$func AS 'abc.mnl.xyz'"
     doAs(
-      "kent", {
+      kent, {
         val e = intercept[AccessControlException](sql(create0))
         assert(e.getMessage === errorMessage("create", "default/func"))
       })
-    doAs("admin", assert(Try(sql(create0)).isSuccess))
+    doAs(admin, assert(Try(sql(create0)).isSuccess))
   }
 
   test("show tables") {
@@ -226,14 +228,14 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
         (s"$db.$table", "table"),
         (s"$db.${table}for_show", "table"),
         (s"$db", "database"))) {
-      doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS $db"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.$table (key int) 
USING $format"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.${table}for_show (key 
int) USING $format"))
-
-      doAs("admin", assert(sql(s"show tables from $db").collect().length === 
2))
-      doAs("bob", assert(sql(s"show tables from $db").collect().length === 0))
-      doAs("i_am_invisible", assert(sql(s"show tables from 
$db").collect().length === 0))
-      doAs("i_am_invisible", assert(sql(s"show tables from 
$db").limit(1).isEmpty))
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $db"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.$table (key int) USING 
$format"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.${table}for_show (key 
int) USING $format"))
+
+      doAs(admin, assert(sql(s"show tables from $db").collect().length === 2))
+      doAs(bob, assert(sql(s"show tables from $db").collect().length === 0))
+      doAs(invisibleUser, assert(sql(s"show tables from $db").collect().length 
=== 0))
+      doAs(invisibleUser, assert(sql(s"show tables from 
$db").limit(1).isEmpty))
     }
   }
 
@@ -241,19 +243,19 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
     val db = "default2"
 
     withCleanTmpResources(Seq((db, "database"))) {
-      doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS $db"))
-      doAs("admin", assert(sql(s"SHOW DATABASES").collect().length == 2))
-      doAs("admin", assert(sql(s"SHOW 
DATABASES").collectAsList().get(0).getString(0) == "default"))
-      doAs("admin", assert(sql(s"SHOW 
DATABASES").collectAsList().get(1).getString(0) == s"$db"))
-
-      doAs("bob", assert(sql(s"SHOW DATABASES").collect().length == 1))
-      doAs("bob", assert(sql(s"SHOW 
DATABASES").collectAsList().get(0).getString(0) == "default"))
-      doAs("i_am_invisible", assert(sql(s"SHOW DATABASES").limit(1).isEmpty))
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $db"))
+      doAs(admin, assert(sql(s"SHOW DATABASES").collect().length == 2))
+      doAs(admin, assert(sql(s"SHOW 
DATABASES").collectAsList().get(0).getString(0) == defaultDb))
+      doAs(admin, assert(sql(s"SHOW 
DATABASES").collectAsList().get(1).getString(0) == s"$db"))
+
+      doAs(bob, assert(sql(s"SHOW DATABASES").collect().length == 1))
+      doAs(bob, assert(sql(s"SHOW 
DATABASES").collectAsList().get(0).getString(0) == defaultDb))
+      doAs(invisibleUser, assert(sql(s"SHOW DATABASES").limit(1).isEmpty))
     }
   }
 
   test("show functions") {
-    val default = "default"
+    val default = defaultDb
     val db3 = "default3"
     val function1 = "function1"
 
@@ -261,41 +263,41 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
       (s"$default.$function1", "function"),
       (s"$db3.$function1", "function"),
       (db3, "database"))) {
-      doAs("admin", sql(s"CREATE FUNCTION $function1 AS 'Function1'"))
-      doAs("admin", assert(sql(s"show user functions 
$default.$function1").collect().length == 1))
-      doAs("bob", assert(sql(s"show user functions 
$default.$function1").collect().length == 0))
+      doAs(admin, sql(s"CREATE FUNCTION $function1 AS 'Function1'"))
+      doAs(admin, assert(sql(s"show user functions 
$default.$function1").collect().length == 1))
+      doAs(bob, assert(sql(s"show user functions 
$default.$function1").collect().length == 0))
 
-      doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS $db3"))
-      doAs("admin", sql(s"CREATE FUNCTION $db3.$function1 AS 'Function1'"))
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $db3"))
+      doAs(admin, sql(s"CREATE FUNCTION $db3.$function1 AS 'Function1'"))
 
-      doAs("admin", assert(sql(s"show user functions 
$db3.$function1").collect().length == 1))
-      doAs("bob", assert(sql(s"show user functions 
$db3.$function1").collect().length == 0))
+      doAs(admin, assert(sql(s"show user functions 
$db3.$function1").collect().length == 1))
+      doAs(bob, assert(sql(s"show user functions 
$db3.$function1").collect().length == 0))
 
-      doAs("admin", assert(sql(s"show system functions").collect().length > 0))
-      doAs("bob", assert(sql(s"show system functions").collect().length > 0))
+      doAs(admin, assert(sql(s"show system functions").collect().length > 0))
+      doAs(bob, assert(sql(s"show system functions").collect().length > 0))
 
-      val adminSystemFunctionCount = doAs("admin", sql(s"show system 
functions").collect().length)
-      val bobSystemFunctionCount = doAs("bob", sql(s"show system 
functions").collect().length)
+      val adminSystemFunctionCount = doAs(admin, sql(s"show system 
functions").collect().length)
+      val bobSystemFunctionCount = doAs(bob, sql(s"show system 
functions").collect().length)
       assert(adminSystemFunctionCount == bobSystemFunctionCount)
     }
   }
 
   test("show columns") {
-    val db = "default"
+    val db = defaultDb
     val table = "src"
     val col = "key"
     val create = s"CREATE TABLE IF NOT EXISTS $db.$table ($col int, value int) 
USING $format"
 
     withCleanTmpResources(Seq((s"$db.$table", "table"))) {
-      doAs("admin", sql(create))
+      doAs(admin, sql(create))
 
-      doAs("admin", assert(sql(s"SHOW COLUMNS IN $table").count() == 2))
-      doAs("admin", assert(sql(s"SHOW COLUMNS IN $db.$table").count() == 2))
-      doAs("admin", assert(sql(s"SHOW COLUMNS IN $table IN $db").count() == 2))
+      doAs(admin, assert(sql(s"SHOW COLUMNS IN $table").count() == 2))
+      doAs(admin, assert(sql(s"SHOW COLUMNS IN $db.$table").count() == 2))
+      doAs(admin, assert(sql(s"SHOW COLUMNS IN $table IN $db").count() == 2))
 
-      doAs("kent", assert(sql(s"SHOW COLUMNS IN $table").count() == 1))
-      doAs("kent", assert(sql(s"SHOW COLUMNS IN $db.$table").count() == 1))
-      doAs("kent", assert(sql(s"SHOW COLUMNS IN $table IN $db").count() == 1))
+      doAs(kent, assert(sql(s"SHOW COLUMNS IN $table").count() == 1))
+      doAs(kent, assert(sql(s"SHOW COLUMNS IN $db.$table").count() == 1))
+      doAs(kent, assert(sql(s"SHOW COLUMNS IN $table IN $db").count() == 1))
     }
   }
 
@@ -310,24 +312,24 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
       (s"$db.${table}_select2", "table"),
       (s"$db.${table}_select3", "table"),
       (s"$db", "database"))) {
-      doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS $db"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_use1 (key 
int) USING $format"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_use2 (key 
int) USING $format"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_select1 (key 
int) USING $format"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_select2 (key 
int) USING $format"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_select3 (key 
int) USING $format"))
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $db"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_use1 (key int) 
USING $format"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_use2 (key int) 
USING $format"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_select1 (key 
int) USING $format"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_select2 (key 
int) USING $format"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.${table}_select3 (key 
int) USING $format"))
 
       doAs(
-        "admin",
+        admin,
         assert(sql(s"show table extended from $db like 
'$table*'").collect().length === 5))
       doAs(
-        "bob",
+        bob,
         assert(sql(s"show tables from $db").collect().length === 5))
       doAs(
-        "bob",
+        bob,
         assert(sql(s"show table extended from $db like 
'$table*'").collect().length === 3))
       doAs(
-        "i_am_invisible",
+        invisibleUser,
         assert(sql(s"show table extended from $db like 
'$table*'").collect().length === 0))
     }
   }
@@ -339,48 +341,48 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
     val globalTempView2 = "global_temp_view2"
 
     // create or replace view
-    doAs("denyuser", sql(s"CREATE TEMPORARY VIEW $tempView AS select * from 
values(1)"))
+    doAs(denyUser, sql(s"CREATE TEMPORARY VIEW $tempView AS select * from 
values(1)"))
     doAs(
-      "denyuser",
+      denyUser,
       sql(s"CREATE GLOBAL TEMPORARY VIEW $globalTempView AS SELECT * FROM 
values(1)"))
 
     // rename view
-    doAs("denyuser2", sql(s"ALTER VIEW $tempView RENAME TO $tempView2"))
+    doAs(denyUser2, sql(s"ALTER VIEW $tempView RENAME TO $tempView2"))
     doAs(
-      "denyuser2",
+      denyUser2,
       sql(s"ALTER VIEW global_temp.$globalTempView RENAME TO 
global_temp.$globalTempView2"))
 
-    doAs("admin", sql(s"DROP VIEW IF EXISTS $tempView2"))
-    doAs("admin", sql(s"DROP VIEW IF EXISTS global_temp.$globalTempView2"))
-    doAs("admin", assert(sql("show tables from global_temp").collect().length 
== 0))
+    doAs(admin, sql(s"DROP VIEW IF EXISTS $tempView2"))
+    doAs(admin, sql(s"DROP VIEW IF EXISTS global_temp.$globalTempView2"))
+    doAs(admin, assert(sql("show tables from global_temp").collect().length == 
0))
   }
 
   test("[KYUUBI #3426] Drop temp view should be skipped permission check") {
     val tempView = "temp_view"
     val globalTempView = "global_temp_view"
-    doAs("denyuser", sql(s"CREATE TEMPORARY VIEW $tempView AS select * from 
values(1)"))
+    doAs(denyUser, sql(s"CREATE TEMPORARY VIEW $tempView AS select * from 
values(1)"))
 
     doAs(
-      "denyuser",
+      denyUser,
       sql(s"CREATE OR REPLACE TEMPORARY VIEW $tempView" +
         s" AS select * from values(1)"))
 
     doAs(
-      "denyuser",
+      denyUser,
       sql(s"CREATE GLOBAL TEMPORARY VIEW $globalTempView AS SELECT * FROM 
values(1)"))
 
     doAs(
-      "denyuser",
+      denyUser,
       sql(s"CREATE OR REPLACE GLOBAL TEMPORARY VIEW $globalTempView" +
         s" AS select * from values(1)"))
 
     // global_temp will contain the temporary view, even if it is not global
-    doAs("admin", assert(sql("show tables from global_temp").collect().length 
== 2))
+    doAs(admin, assert(sql("show tables from global_temp").collect().length == 
2))
 
-    doAs("denyuser2", sql(s"DROP VIEW IF EXISTS $tempView"))
-    doAs("denyuser2", sql(s"DROP VIEW IF EXISTS global_temp.$globalTempView"))
+    doAs(denyUser2, sql(s"DROP VIEW IF EXISTS $tempView"))
+    doAs(denyUser2, sql(s"DROP VIEW IF EXISTS global_temp.$globalTempView"))
 
-    doAs("admin", assert(sql("show tables from global_temp").collect().length 
== 0))
+    doAs(admin, assert(sql("show tables from global_temp").collect().length == 
0))
   }
 
   test("[KYUUBI #3428] AlterViewAsCommand should be skipped permission check") 
{
@@ -388,26 +390,26 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
     val globalTempView = "global_temp_view"
 
     // create or replace view
-    doAs("denyuser", sql(s"CREATE TEMPORARY VIEW $tempView AS select * from 
values(1)"))
+    doAs(denyUser, sql(s"CREATE TEMPORARY VIEW $tempView AS select * from 
values(1)"))
     doAs(
-      "denyuser",
+      denyUser,
       sql(s"CREATE OR REPLACE TEMPORARY VIEW $tempView" +
         s" AS select * from values(1)"))
     doAs(
-      "denyuser",
+      denyUser,
       sql(s"CREATE GLOBAL TEMPORARY VIEW $globalTempView AS SELECT * FROM 
values(1)"))
     doAs(
-      "denyuser",
+      denyUser,
       sql(s"CREATE OR REPLACE GLOBAL TEMPORARY VIEW $globalTempView" +
         s" AS select * from values(1)"))
 
     // rename view
-    doAs("denyuser2", sql(s"ALTER VIEW $tempView AS SELECT * FROM values(1)"))
-    doAs("denyuser2", sql(s"ALTER VIEW global_temp.$globalTempView AS SELECT * 
FROM values(1)"))
+    doAs(denyUser2, sql(s"ALTER VIEW $tempView AS SELECT * FROM values(1)"))
+    doAs(denyUser2, sql(s"ALTER VIEW global_temp.$globalTempView AS SELECT * 
FROM values(1)"))
 
-    doAs("admin", sql(s"DROP VIEW IF EXISTS $tempView"))
-    doAs("admin", sql(s"DROP VIEW IF EXISTS global_temp.$globalTempView"))
-    doAs("admin", assert(sql("show tables from global_temp").collect().length 
== 0))
+    doAs(admin, sql(s"DROP VIEW IF EXISTS $tempView"))
+    doAs(admin, sql(s"DROP VIEW IF EXISTS global_temp.$globalTempView"))
+    doAs(admin, assert(sql("show tables from global_temp").collect().length == 
0))
   }
 
   test("[KYUUBI #3343] pass temporary view creation") {
@@ -416,28 +418,28 @@ abstract class RangerSparkExtensionSuite extends 
AnyFunSuite
 
     withTempView(tempView) {
       doAs(
-        "denyuser",
+        denyUser,
         assert(Try(sql(s"CREATE TEMPORARY VIEW $tempView AS select * from 
values(1)")).isSuccess))
 
       doAs(
-        "denyuser",
+        denyUser,
         Try(sql(s"CREATE OR REPLACE TEMPORARY VIEW $tempView" +
           s" AS select * from values(1)")).isSuccess)
     }
 
     withGlobalTempView(globalTempView) {
       doAs(
-        "denyuser",
+        denyUser,
         Try(
           sql(
             s"CREATE GLOBAL TEMPORARY VIEW $globalTempView AS SELECT * FROM 
values(1)")).isSuccess)
 
       doAs(
-        "denyuser",
+        denyUser,
         Try(sql(s"CREATE OR REPLACE GLOBAL TEMPORARY VIEW $globalTempView" +
           s" AS select * from values(1)")).isSuccess)
     }
-    doAs("admin", assert(sql("show tables from global_temp").collect().length 
== 0))
+    doAs(admin, assert(sql("show tables from global_temp").collect().length == 
0))
   }
 }
 
@@ -450,9 +452,9 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   test("table stats must be specified") {
     val table = "hive_src"
     withCleanTmpResources(Seq((table, "table"))) {
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $table (id int)"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $table (id int)"))
       doAs(
-        "admin", {
+        admin, {
           val hiveTableRelation = sql(s"SELECT * FROM $table")
             
.queryExecution.optimizedPlan.collectLeaves().head.asInstanceOf[HiveTableRelation]
           assert(getFieldVal[Option[Statistics]](hiveTableRelation, 
"tableStats").nonEmpty)
@@ -463,9 +465,9 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   test("HiveTableRelation should be able to be converted to LogicalRelation") {
     val table = "hive_src"
     withCleanTmpResources(Seq((table, "table"))) {
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $table (id int) STORED AS 
PARQUET"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $table (id int) STORED AS 
PARQUET"))
       doAs(
-        "admin", {
+        admin, {
           val relation = sql(s"SELECT * FROM $table")
             .queryExecution.optimizedPlan.collectLeaves().head
           assert(relation.isInstanceOf[LogicalRelation])
@@ -483,7 +485,7 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       (s"$db.$table1", "table"),
       (s"$db", "database"))) {
       doAs(
-        "admin", {
+        admin, {
           sql(s"CREATE DATABASE IF NOT EXISTS $db")
           sql(s"CREATE TABLE IF NOT EXISTS $db.$table1(id int) STORED AS 
PARQUET")
           sql(s"INSERT INTO $db.$table1 SELECT 1")
@@ -504,16 +506,16 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       (adminPermView, "view"),
       (permView, "view"),
       (table, "table"))) {
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $table (id int)"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $table (id int)"))
 
-      doAs("admin", sql(s"CREATE VIEW ${adminPermView} AS SELECT * FROM 
$table"))
+      doAs(admin, sql(s"CREATE VIEW ${adminPermView} AS SELECT * FROM $table"))
 
       val e1 = intercept[AccessControlException](
-        doAs("someone", sql(s"CREATE VIEW $permView AS SELECT 1 as a")))
+        doAs(someone, sql(s"CREATE VIEW $permView AS SELECT 1 as a")))
       assert(e1.getMessage.contains(s"does not have [create] privilege on 
[default/$permView]"))
 
       val e2 = intercept[AccessControlException](
-        doAs("someone", sql(s"CREATE VIEW $permView AS SELECT * FROM $table")))
+        doAs(someone, sql(s"CREATE VIEW $permView AS SELECT * FROM $table")))
       if (isSparkV32OrGreater) {
         assert(e2.getMessage.contains(s"does not have [select] privilege on 
[default/$table/id]"))
       } else {
@@ -523,20 +525,20 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   }
 
   test("[KYUUBI #3326] check persisted view and skip shadowed table") {
-    val db1 = "default"
+    val db1 = defaultDb
     val table = "hive_src"
     val permView = "perm_view"
 
     withCleanTmpResources(Seq(
       (s"$db1.$table", "table"),
       (s"$db1.$permView", "view"))) {
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
-      doAs("admin", sql(s"CREATE VIEW $db1.$permView AS SELECT * FROM 
$db1.$table"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
+      doAs(admin, sql(s"CREATE VIEW $db1.$permView AS SELECT * FROM 
$db1.$table"))
 
       // KYUUBI #3326: with no privileges to the permanent view or the source 
table
       val e1 = intercept[AccessControlException](
         doAs(
-          "someone", {
+          someone, {
             sql(s"select * from $db1.$permView").collect()
           }))
       if (isSparkV31OrGreater) {
@@ -548,16 +550,16 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   }
 
   test("KYUUBI #4504: query permanent view with privilege to permanent view 
only") {
-    val db1 = "default"
+    val db1 = defaultDb
     val table = "hive_src"
     val permView = "perm_view"
-    val userPermViewOnly = "user_perm_view_only"
+    val userPermViewOnly = permViewOnlyUser
 
     withCleanTmpResources(Seq(
       (s"$db1.$table", "table"),
       (s"$db1.$permView", "view"))) {
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
-      doAs("admin", sql(s"CREATE VIEW $db1.$permView AS SELECT * FROM 
$db1.$table"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
+      doAs(admin, sql(s"CREATE VIEW $db1.$permView AS SELECT * FROM 
$db1.$table"))
 
       // query all columns of the permanent view
       // with access privileges to the permanent view but no privilege to the 
source table
@@ -582,7 +584,7 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   }
 
   test("[KYUUBI #3371] support throws all disallowed privileges in exception") 
{
-    val db1 = "default"
+    val db1 = defaultDb
     val srcTable1 = "hive_src1"
     val srcTable2 = "hive_src2"
     val sinkTable1 = "hive_sink1"
@@ -592,17 +594,17 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       (s"$db1.$srcTable2", "table"),
       (s"$db1.$sinkTable1", "table"))) {
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $db1.$srcTable1" +
           s" (id int, name string, city string)"))
 
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $db1.$srcTable2" +
           s" (id int, age int)"))
 
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $db1.$sinkTable1" +
           s" (id int, age int, name string, city string)"))
 
@@ -611,14 +613,14 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
         s" FROM $db1.$srcTable1 as tb1" +
         s" JOIN $db1.$srcTable2 as tb2" +
         s" on tb1.id = tb2.id"
-      val e1 = intercept[AccessControlException](doAs("someone", 
sql(insertSql1)))
+      val e1 = intercept[AccessControlException](doAs(someone, 
sql(insertSql1)))
       assert(e1.getMessage.contains(s"does not have [select] privilege on 
[$db1/$srcTable1/id]"))
 
       try {
         SparkRangerAdminPlugin.getRangerConf.setBoolean(
           
s"ranger.plugin.${SparkRangerAdminPlugin.getServiceType}.authorize.in.single.call",
           true)
-        val e2 = intercept[AccessControlException](doAs("someone", 
sql(insertSql1)))
+        val e2 = intercept[AccessControlException](doAs(someone, 
sql(insertSql1)))
         assert(e2.getMessage.contains(s"does not have" +
           s" [select] privilege on" +
           s" [$db1/$srcTable1/id,$db1/$srcTable1/name,$db1/$srcTable1/city," +
@@ -637,7 +639,7 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   test("[KYUUBI #3411] skip checking cache table") {
     if (isSparkV32OrGreater) { // cache table sql supported since 3.2.0
 
-      val db1 = "default"
+      val db1 = defaultDb
       val srcTable1 = "hive_src1"
       val cacheTable1 = "cacheTable1"
       val cacheTable2 = "cacheTable2"
@@ -652,23 +654,23 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
         (s"$db1.$cacheTable4", "cache"))) {
 
         doAs(
-          "admin",
+          admin,
           sql(s"CREATE TABLE IF NOT EXISTS $db1.$srcTable1" +
             s" (id int, name string, city string)"))
 
         val e1 = intercept[AccessControlException](
-          doAs("someone", sql(s"CACHE TABLE $cacheTable2 select * from 
$db1.$srcTable1")))
+          doAs(someone, sql(s"CACHE TABLE $cacheTable2 select * from 
$db1.$srcTable1")))
         assert(
           e1.getMessage.contains(s"does not have [select] privilege on 
[$db1/$srcTable1/id]"))
 
-        doAs("admin", sql(s"CACHE TABLE $cacheTable3 SELECT 1 AS a, 2 AS b "))
-        doAs("someone", sql(s"CACHE TABLE $cacheTable4 select 1 as a, 2 as b 
"))
+        doAs(admin, sql(s"CACHE TABLE $cacheTable3 SELECT 1 AS a, 2 AS b "))
+        doAs(someone, sql(s"CACHE TABLE $cacheTable4 select 1 as a, 2 as b "))
       }
     }
   }
 
   test("[KYUUBI #3608] Support {OWNER} variable for queries") {
-    val db = "default"
+    val db = defaultDb
     val table = "owner_variable"
 
     val select = s"SELECT key FROM $db.$table"
@@ -687,7 +689,7 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
         }.isSuccess))
 
       doAs(
-        "create_only_user", {
+        createOnlyUser, {
           val e = intercept[AccessControlException](sql(select).collect())
           assert(e.getMessage === errorMessage("select", s"$db/$table/key"))
         })
@@ -701,22 +703,22 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       Seq(
         (s"$db.$table", "table"),
         (s"$db", "database"))) {
-      doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS $db"))
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db.$table (key int) 
USING $format"))
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $db"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db.$table (key int) USING 
$format"))
       sql("SHOW DATABASES").queryExecution.optimizedPlan.stats
       sql(s"SHOW TABLES IN $db").queryExecution.optimizedPlan.stats
     }
   }
 
   test("[KYUUBI #4658] insert overwrite hive directory") {
-    val db1 = "default"
+    val db1 = defaultDb
     val table = "src"
 
     withCleanTmpResources(Seq((s"$db1.$table", "table"))) {
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
       val e = intercept[AccessControlException](
         doAs(
-          "someone",
+          someone,
           sql(
             s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir' ROW FORMAT 
DELIMITED FIELDS
                | TERMINATED BY ','
@@ -726,14 +728,14 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
   }
 
   test("[KYUUBI #4658] insert overwrite datasource directory") {
-    val db1 = "default"
+    val db1 = defaultDb
     val table = "src"
 
     withCleanTmpResources(Seq((s"$db1.$table", "table"))) {
-      doAs("admin", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
+      doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name 
string)"))
       val e = intercept[AccessControlException](
         doAs(
-          "someone",
+          someone,
           sql(
             s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir'
                | USING parquet
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPluginSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPluginSuite.scala
index 3338a3314..301ae87c5 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPluginSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/SparkRangerAdminPluginSuite.scala
@@ -22,6 +22,8 @@ import org.apache.hadoop.security.UserGroupInformation
 import org.scalatest.funsuite.AnyFunSuite
 
 import org.apache.kyuubi.plugin.spark.authz.{ObjectType, OperationType}
+import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.ranger.SparkRangerAdminPlugin._
 
 class SparkRangerAdminPluginSuite extends AnyFunSuite {
@@ -29,13 +31,13 @@ class SparkRangerAdminPluginSuite extends AnyFunSuite {
 
   test("get filter expression") {
     val bob = UserGroupInformation.createRemoteUser("bob")
-    val are = AccessResource(ObjectType.TABLE, "default", "src", null)
+    val are = AccessResource(ObjectType.TABLE, defaultDb, "src", null)
     def buildAccessRequest(ugi: UserGroupInformation): AccessRequest = {
       AccessRequest(are, ugi, OperationType.QUERY, AccessType.SELECT)
     }
     val maybeString = getFilterExpr(buildAccessRequest(bob))
     assert(maybeString.get === "key<20")
-    Seq("admin", "alice").foreach { user =>
+    Seq(admin, alice).foreach { user =>
       val ugi = UserGroupInformation.createRemoteUser(user)
       val maybeString = getFilterExpr(buildAccessRequest(ugi))
       assert(maybeString.isEmpty)
@@ -45,7 +47,7 @@ class SparkRangerAdminPluginSuite extends AnyFunSuite {
   test("get data masker") {
     val bob = UserGroupInformation.createRemoteUser("bob")
     def buildAccessRequest(ugi: UserGroupInformation, column: String): 
AccessRequest = {
-      val are = AccessResource(ObjectType.COLUMN, "default", "src", column)
+      val are = AccessResource(ObjectType.COLUMN, defaultDb, "src", column)
       AccessRequest(are, ugi, OperationType.QUERY, AccessType.SELECT)
     }
     assert(getMaskingExpr(buildAccessRequest(bob, "value1")).get === 
"md5(cast(value1 as string))")
@@ -59,7 +61,7 @@ class SparkRangerAdminPluginSuite extends AnyFunSuite {
       "left(value5, length(value5) - 4), '[A-Z]', 'X'), '[a-z]', 'x')," +
       " '[0-9]', 'n'), '[^A-Za-z0-9]', 'U'), right(value5, 4))")
 
-    Seq("admin", "alice").foreach { user =>
+    Seq(admin, alice).foreach { user =>
       val ugi = UserGroupInformation.createRemoteUser(user)
       val maybeString = getMaskingExpr(buildAccessRequest(ugi, "value1"))
       assert(maybeString.isEmpty)
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
index 73a13bc1c..07fe0ae5a 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
@@ -22,6 +22,8 @@ import scala.util.Try
 
 // scalastyle:off
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException
+import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 
 /**
  * Tests for RangerSparkExtensionSuite
@@ -32,8 +34,6 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
   val catalogV2 = "testcat"
   val jdbcCatalogV2 = "jdbc2"
-  val namespace1 = "ns1"
-  val namespace2 = "ns2"
   val table1 = "table1"
   val table2 = "table2"
   val outputTable1 = "outputTable1"
@@ -54,13 +54,13 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
       super.beforeAll()
 
-      doAs("admin", sql(s"CREATE DATABASE IF NOT EXISTS 
$catalogV2.$namespace1"))
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $catalogV2.$namespace1"))
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table1" +
           " (id int, name string, city string)"))
       doAs(
-        "admin",
+        admin,
         sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$outputTable1" 
+
           " (id int, name string, city string)"))
     }
@@ -82,7 +82,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     // create database
     val e1 = intercept[AccessControlException](
-      doAs("someone", sql(s"CREATE DATABASE IF NOT EXISTS 
$catalogV2.$namespace2").explain()))
+      doAs(someone, sql(s"CREATE DATABASE IF NOT EXISTS 
$catalogV2.$namespace2").explain()))
     assert(e1.getMessage.contains(s"does not have [create] privilege" +
       s" on [$namespace2]"))
   }
@@ -92,7 +92,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     // create database
     val e1 = intercept[AccessControlException](
-      doAs("someone", sql(s"DROP DATABASE IF EXISTS 
$catalogV2.$namespace2").explain()))
+      doAs(someone, sql(s"DROP DATABASE IF EXISTS 
$catalogV2.$namespace2").explain()))
     assert(e1.getMessage.contains(s"does not have [drop] privilege" +
       s" on [$namespace2]"))
   }
@@ -102,7 +102,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     // select
     val e1 = intercept[AccessControlException](
-      doAs("someone", sql(s"select city, id from 
$catalogV2.$namespace1.$table1").explain()))
+      doAs(someone, sql(s"select city, id from 
$catalogV2.$namespace1.$table1").explain()))
     assert(e1.getMessage.contains(s"does not have [select] privilege" +
       s" on [$namespace1/$table1/city]"))
   }
@@ -110,7 +110,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
   test("[KYUUBI #4255] DESCRIBE TABLE") {
     assume(isSparkV31OrGreater)
     val e1 = intercept[AccessControlException](
-      doAs("someone", sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain()))
+      doAs(someone, sql(s"DESCRIBE TABLE $catalogV2.$namespace1.$table1").explain()))
     assert(e1.getMessage.contains(s"does not have [select] privilege" +
       s" on [$namespace1/$table1]"))
   }
@@ -120,14 +120,14 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     // CreateTable
     val e2 = intercept[AccessControlException](
-      doAs("someone", sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table2")))
+      doAs(someone, sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table2")))
     assert(e2.getMessage.contains(s"does not have [create] privilege" +
       s" on [$namespace1/$table2]"))
 
     // CreateTableAsSelect
     val e21 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(s"CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table2" +
           s" AS select * from $catalogV2.$namespace1.$table1")))
     assert(e21.getMessage.contains(s"does not have [select] privilege" +
@@ -139,7 +139,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     // DropTable
     val e3 = intercept[AccessControlException](
-      doAs("someone", sql(s"DROP TABLE $catalogV2.$namespace1.$table1")))
+      doAs(someone, sql(s"DROP TABLE $catalogV2.$namespace1.$table1")))
     assert(e3.getMessage.contains(s"does not have [drop] privilege" +
       s" on [$namespace1/$table1]"))
   }
@@ -150,7 +150,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // AppendData: Insert Using a VALUES Clause
     val e4 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"INSERT INTO $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
           s" VALUES (1, 'bowenliang123', 'Guangzhou')")))
     assert(e4.getMessage.contains(s"does not have [update] privilege" +
@@ -159,7 +159,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // AppendData: Insert Using a TABLE Statement
     val e42 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"INSERT INTO $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
           s" TABLE $catalogV2.$namespace1.$table1")))
     assert(e42.getMessage.contains(s"does not have [select] privilege" +
@@ -168,7 +168,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // AppendData: Insert Using a SELECT Statement
     val e43 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"INSERT INTO $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
           s" SELECT * from $catalogV2.$namespace1.$table1")))
     assert(e43.getMessage.contains(s"does not have [select] privilege" +
@@ -177,7 +177,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // OverwriteByExpression: Insert Overwrite
     val e44 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"INSERT OVERWRITE $catalogV2.$namespace1.$outputTable1 (id, name, city)" +
           s" VALUES (1, 'bowenliang123', 'Guangzhou')")))
     assert(e44.getMessage.contains(s"does not have [update] privilege" +
@@ -199,7 +199,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // MergeIntoTable:  Using a MERGE INTO Statement
     val e1 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(mergeIntoSql)))
     assert(e1.getMessage.contains(s"does not have [select] privilege" +
       s" on [$namespace1/$table1/id]"))
@@ -210,7 +210,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
         true)
       val e2 = intercept[AccessControlException](
         doAs(
-          "someone",
+          someone,
           sql(mergeIntoSql)))
       assert(e2.getMessage.contains(s"does not have" +
         s" [select] privilege" +
@@ -229,7 +229,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // UpdateTable
     val e5 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(s"UPDATE $catalogV2.$namespace1.$table1 SET city='Hangzhou' " +
           " WHERE id=1")))
     assert(e5.getMessage.contains(s"does not have [update] privilege" +
@@ -241,7 +241,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     // DeleteFromTable
     val e6 = intercept[AccessControlException](
-      doAs("someone", sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE id=1")))
+      doAs(someone, sql(s"DELETE FROM $catalogV2.$namespace1.$table1 WHERE id=1")))
     assert(e6.getMessage.contains(s"does not have [update] privilege" +
       s" on [$namespace1/$table1]"))
   }
@@ -252,7 +252,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // CacheTable
     val e7 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(s"CACHE TABLE $cacheTable1" +
           s" AS select * from $catalogV2.$namespace1.$table1")))
     if (isSparkV32OrGreater) {
@@ -269,7 +269,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     val e1 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(s"TRUNCATE TABLE $catalogV2.$namespace1.$table1")))
     assert(e1.getMessage.contains(s"does not have [update] privilege" +
       s" on [$namespace1/$table1]"))
@@ -280,7 +280,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
 
     val e1 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(s"MSCK REPAIR TABLE $catalogV2.$namespace1.$table1")))
     assert(e1.getMessage.contains(s"does not have [alter] privilege" +
       s" on [$namespace1/$table1]"))
@@ -292,7 +292,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // AddColumns
     val e61 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 ADD COLUMNS (age int) ").explain()))
     assert(e61.getMessage.contains(s"does not have [alter] privilege" +
       s" on [$namespace1/$table1]"))
@@ -300,7 +300,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // DropColumns
     val e62 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 DROP COLUMNS city ").explain()))
     assert(e62.getMessage.contains(s"does not have [alter] privilege" +
       s" on [$namespace1/$table1]"))
@@ -308,7 +308,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // RenameColumn
     val e63 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 RENAME COLUMN city TO city2 ").explain()))
     assert(e63.getMessage.contains(s"does not have [alter] privilege" +
       s" on [$namespace1/$table1]"))
@@ -316,7 +316,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // AlterColumn
     val e64 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
         sql(s"ALTER TABLE $catalogV2.$namespace1.$table1 " +
           s"ALTER COLUMN city COMMENT 'city' ")))
     assert(e64.getMessage.contains(s"does not have [alter] privilege" +
@@ -329,7 +329,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // CommentOnNamespace
     val e1 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"COMMENT ON DATABASE $catalogV2.$namespace1 IS 'xYz' ").explain()))
     assert(e1.getMessage.contains(s"does not have [alter] privilege" +
       s" on [$namespace1]"))
@@ -337,7 +337,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // CommentOnNamespace
     val e2 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"COMMENT ON NAMESPACE $catalogV2.$namespace1 IS 'xYz' ").explain()))
     assert(e2.getMessage.contains(s"does not have [alter] privilege" +
       s" on [$namespace1]"))
@@ -345,7 +345,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSu
     // CommentOnTable
     val e3 = intercept[AccessControlException](
       doAs(
-        "someone",
+        someone,
        sql(s"COMMENT ON TABLE $catalogV2.$namespace1.$table1 IS 'xYz' ").explain()))
     assert(e3.getMessage.contains(s"does not have [alter] privilege" +
       s" on [$namespace1/$table1]"))
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingTestBase.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingTestBase.scala
index 29a709311..bae269e7a 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingTestBase.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingTestBase.scala
@@ -17,16 +17,17 @@
 
 package org.apache.kyuubi.plugin.spark.authz.ranger.datamasking
 
-// scalastyle:off
 import java.sql.Timestamp
 
 import scala.util.Try
 
+// scalastyle:off
 import org.apache.commons.codec.digest.DigestUtils.md5Hex
 import org.apache.spark.sql.{Row, SparkSessionExtensions}
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.funsuite.AnyFunSuite
 
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.SparkSessionProvider
 import org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension
 
@@ -75,18 +76,18 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
   }
 
   override def beforeAll(): Unit = {
-    doAs("admin", setup())
+    doAs(admin, setup())
     super.beforeAll()
   }
   override def afterAll(): Unit = {
-    doAs("admin", cleanup())
+    doAs(admin, cleanup())
     spark.stop
     super.afterAll()
   }
 
   test("simple query with a user doesn't have mask rules") {
     checkAnswer(
-      "kent",
+      kent,
       "SELECT key FROM default.src order by key",
       Seq(Row(1), Row(10), Row(11), Row(20), Row(30)))
   }
@@ -95,12 +96,12 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
     val result =
      Seq(Row(md5Hex("1"), "xxxxx", "worlx", Timestamp.valueOf("2018-01-01 00:00:00"), "Xorld"))
     checkAnswer(
-      "bob",
+      bob,
       "SELECT value1, value2, value3, value4, value5 FROM default.src " +
         "where key = 1",
       result)
     checkAnswer(
-      "bob",
+      bob,
       "SELECT value1 as key, value2, value3, value4, value5 FROM default.src 
where key = 1",
       result)
   }
@@ -108,14 +109,14 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
   test("star") {
     val result =
      Seq(Row(1, md5Hex("1"), "xxxxx", "worlx", Timestamp.valueOf("2018-01-01 00:00:00"), "Xorld"))
-    checkAnswer("bob", "SELECT * FROM default.src where key = 1", result)
+    checkAnswer(bob, "SELECT * FROM default.src where key = 1", result)
   }
 
   test("simple udf") {
     val result =
       Seq(Row(md5Hex("1"), "xxxxx", "worlx", Timestamp.valueOf("2018-01-01 
00:00:00"), "Xorld"))
     checkAnswer(
-      "bob",
+      bob,
       "SELECT max(value1), max(value2), max(value3), max(value4), max(value5) 
FROM default.src" +
         " where key = 1",
       result)
@@ -125,7 +126,7 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
     val result =
       Seq(Row(md5Hex("1"), "xxxxx", "worlx", Timestamp.valueOf("2018-01-01 
00:00:00"), "Xorld"))
     checkAnswer(
-      "bob",
+      bob,
       "SELECT coalesce(max(value1), 1), coalesce(max(value2), 1), 
coalesce(max(value3), 1), " +
         "coalesce(max(value4), timestamp '2018-01-01 22:33:44'), 
coalesce(max(value5), 1) " +
         "FROM default.src where key = 1",
@@ -136,7 +137,7 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
     val result =
       Seq(Row(md5Hex("1"), "xxxxx", "worlx", Timestamp.valueOf("2018-01-01 
00:00:00"), "Xorld"))
     checkAnswer(
-      "bob",
+      bob,
       "SELECT value1, value2, value3, value4, value5 FROM default.src WHERE 
value2 in " +
         "(SELECT value2 as key FROM default.src where key = 1)",
       result)
@@ -145,59 +146,59 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
   test("create a unmasked table as select from a masked one") {
     withCleanTmpResources(Seq(("default.src2", "table"))) {
       doAs(
-        "bob",
+        bob,
        sql(s"CREATE TABLE default.src2 $format AS SELECT value1 FROM default.src " +
           s"where key = 1"))
-      checkAnswer("bob", "SELECT value1 FROM default.src2", Seq(Row(md5Hex("1"))))
+      checkAnswer(bob, "SELECT value1 FROM default.src2", Seq(Row(md5Hex("1"))))
     }
   }
 
   test("insert into a unmasked table from a masked one") {
     withCleanTmpResources(Seq(("default.src2", "table"), ("default.src3", 
"table"))) {
-      doAs("bob", sql(s"CREATE TABLE default.src2 (value1 string) $format"))
+      doAs(bob, sql(s"CREATE TABLE default.src2 (value1 string) $format"))
       doAs(
-        "bob",
+        bob,
         sql(s"INSERT INTO default.src2 SELECT value1 from default.src " +
           s"where key = 1"))
       doAs(
-        "bob",
+        bob,
         sql(s"INSERT INTO default.src2 SELECT value1 as v from default.src " +
           s"where key = 1"))
-      checkAnswer("bob", "SELECT value1 FROM default.src2", Seq(Row(md5Hex("1")), Row(md5Hex("1"))))
-      doAs("bob", sql(s"CREATE TABLE default.src3 (k int, value string) $format"))
+      checkAnswer(bob, "SELECT value1 FROM default.src2", Seq(Row(md5Hex("1")), Row(md5Hex("1"))))
+      doAs(bob, sql(s"CREATE TABLE default.src3 (k int, value string) $format"))
       doAs(
-        "bob",
+        bob,
         sql(s"INSERT INTO default.src3 SELECT key, value1 from default.src  " +
           s"where key = 1"))
       doAs(
-        "bob",
+        bob,
         sql(s"INSERT INTO default.src3 SELECT key, value1 as v from 
default.src " +
           s"where key = 1"))
-      checkAnswer("bob", "SELECT value FROM default.src3", Seq(Row(md5Hex("1")), Row(md5Hex("1"))))
+      checkAnswer(bob, "SELECT value FROM default.src3", Seq(Row(md5Hex("1")), Row(md5Hex("1"))))
     }
   }
 
   test("join on an unmasked table") {
     val s = "SELECT a.value1, b.value1 FROM default.src a" +
       " join default.unmasked b on a.value1=b.value1"
-    checkAnswer("bob", s, Nil)
-    checkAnswer("bob", s, Nil) // just for testing query multiple times, don't 
delete it
+    checkAnswer(bob, s, Nil)
+    checkAnswer(bob, s, Nil) // just for testing query multiple times, don't 
delete it
   }
 
   test("self join on a masked table") {
     val s = "SELECT a.value1, b.value1 FROM default.src a" +
       " join default.src b on a.value1=b.value1 where a.key = 1 and b.key = 1 "
-    checkAnswer("bob", s, Seq(Row(md5Hex("1"), md5Hex("1"))))
+    checkAnswer(bob, s, Seq(Row(md5Hex("1"), md5Hex("1"))))
     // just for testing query multiple times, don't delete it
-    checkAnswer("bob", s, Seq(Row(md5Hex("1"), md5Hex("1"))))
+    checkAnswer(bob, s, Seq(Row(md5Hex("1"), md5Hex("1"))))
   }
 
   test("self join on a masked table and filter the masked column with original 
value") {
     val s = "SELECT a.value1, b.value1 FROM default.src a" +
       " join default.src b on a.value1=b.value1" +
       " where a.value1='1' and b.value1='1'"
-    checkAnswer("bob", s, Nil)
-    checkAnswer("bob", s, Nil) // just for testing query multiple times, don't 
delete it
+    checkAnswer(bob, s, Nil)
+    checkAnswer(bob, s, Nil) // just for testing query multiple times, don't 
delete it
   }
 
   test("self join on a masked table and filter the masked column with masked 
value") {
@@ -245,7 +246,7 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
     //                  +- DataMaskingStage0Marker Relation 
default.src[key#60,value1#61,value2#62,value3#63,value4#64,value5#65] parquet
     //                     +- Project [key#153, md5(cast(cast(value1#154 as 
string) as binary)) AS value1#148, 
regexp_replace(regexp_replace(regexp_replace(value2#155, [A-Z], X, 1), [a-z], 
x, 1), [0-9], n, 1) AS value2#149, 
regexp_replace(regexp_replace(regexp_replace(value3#156, [A-Z], X, 5), [a-z], 
x, 5), [0-9], n, 5) AS value3#150, date_trunc(YEAR, value4#157, 
Some(Asia/Shanghai)) AS value4#151, 
concat(regexp_replace(regexp_replace(regexp_replace(left(value5#158, 
(length(value5#158) - [...]
     //                        +- Relation 
default.src[key#153,value1#154,value2#155,value3#156,value4#157,value5#158] 
parquet
-    // checkAnswer("bob", s, Seq(Row(md5Hex("1"), md5Hex("1"))))
+    // checkAnswer(bob, s, Seq(Row(md5Hex("1"), md5Hex("1"))))
     //
     //
     // scalastyle:on
@@ -254,9 +255,9 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
     val s2 = "SELECT a.value1, b.value1 FROM default.src a" +
       " join default.src b on a.value1=b.value1" +
       s" where a.value2='xxxxx' and b.value2='xxxxx'"
-    checkAnswer("bob", s2, Seq(Row(md5Hex("1"), md5Hex("1"))))
+    checkAnswer(bob, s2, Seq(Row(md5Hex("1"), md5Hex("1"))))
     // just for testing query multiple times, don't delete it
-    checkAnswer("bob", s2, Seq(Row(md5Hex("1"), md5Hex("1"))))
+    checkAnswer(bob, s2, Seq(Row(md5Hex("1"), md5Hex("1"))))
   }
 
   test("union an unmasked table") {
@@ -267,30 +268,30 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
           (SELECT b.value1 FROM default.unmasked b)
       ) c order by value1
       """
-    doAs("bob", sql(s).show)
-    checkAnswer("bob", s, Seq(Row("1"), Row("2"), Row("3"), Row("4"), 
Row("5"), Row(md5Hex("1"))))
+    doAs(bob, sql(s).show)
+    checkAnswer(bob, s, Seq(Row("1"), Row("2"), Row("3"), Row("4"), Row("5"), 
Row(md5Hex("1"))))
   }
 
   test("union a masked table") {
     val s = "SELECT a.value1 FROM default.src a where a.key = 1 union" +
       " (SELECT b.value1 FROM default.src b where b.key = 1)"
-    checkAnswer("bob", s, Seq(Row(md5Hex("1"))))
+    checkAnswer(bob, s, Seq(Row(md5Hex("1"))))
   }
 
   test("KYUUBI #3581: permanent view should lookup rule on itself not the raw 
table") {
     assume(isSparkV31OrGreater)
     val supported = doAs(
-      "perm_view_user",
+      permViewUser,
       Try(sql("CREATE OR REPLACE VIEW default.perm_view AS SELECT * FROM 
default.src")).isSuccess)
     assume(supported, s"view support for '$format' has not been implemented 
yet")
 
     withCleanTmpResources(Seq(("default.perm_view", "view"))) {
       checkAnswer(
-        "perm_view_user",
+        permViewUser,
         "SELECT value1, value2 FROM default.src where key = 1",
         Seq(Row(1, "hello")))
       checkAnswer(
-        "perm_view_user",
+        permViewUser,
         "SELECT value1, value2 FROM default.perm_view where key = 1",
         Seq(Row(md5Hex("1"), "hello")))
     }
@@ -303,7 +304,7 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
     val s2 = s"SELECT * FROM default.src where key = 11"
     // scalastyle:off
     checkAnswer(
-      "bob",
+      bob,
       s1,
       Seq(Row(
         10,
@@ -313,7 +314,7 @@ trait DataMaskingTestBase extends AnyFunSuite with 
SparkSessionProvider with Bef
         Timestamp.valueOf("2018-01-01 00:00:00"),
         "xxxxxUXXXXUnnnUUUUUUXUUUUUUUUUア叶葉엽")))
     checkAnswer(
-      "bob",
+      bob,
       s2,
       Seq(Row(
         11,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringTestBase.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringTestBase.scala
index a73690724..3236c97b1 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringTestBase.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringTestBase.scala
@@ -24,6 +24,7 @@ import org.apache.spark.sql.{Row, SparkSessionExtensions}
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.funsuite.AnyFunSuite
 
+import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.SparkSessionProvider
 import org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension
 
@@ -47,72 +48,72 @@ trait RowFilteringTestBase extends AnyFunSuite with 
SparkSessionProvider with Be
   }
 
   override def beforeAll(): Unit = {
-    doAs("admin", setup())
+    doAs(admin, setup())
     super.beforeAll()
   }
   override def afterAll(): Unit = {
-    doAs("admin", cleanup())
+    doAs(admin, cleanup())
     spark.stop
     super.afterAll()
   }
 
   test("user without row filtering rule") {
     checkAnswer(
-      "kent",
+      kent,
       "SELECT key FROM default.src order order by key",
       Seq(Row(1), Row(20), Row(30)))
   }
 
   test("simple query projecting filtering column") {
-    checkAnswer("bob", "SELECT key FROM default.src", Seq(Row(1)))
+    checkAnswer(bob, "SELECT key FROM default.src", Seq(Row(1)))
   }
 
   test("simple query projecting non filtering column") {
-    checkAnswer("bob", "SELECT value FROM default.src", Seq(Row(1)))
+    checkAnswer(bob, "SELECT value FROM default.src", Seq(Row(1)))
   }
 
   test("simple query projecting non filtering column with udf max") {
-    checkAnswer("bob", "SELECT max(value) FROM default.src", Seq(Row(1)))
+    checkAnswer(bob, "SELECT max(value) FROM default.src", Seq(Row(1)))
   }
 
   test("simple query projecting non filtering column with udf coalesce") {
-    checkAnswer("bob", "SELECT coalesce(max(value), 1) FROM default.src", Seq(Row(1)))
+    checkAnswer(bob, "SELECT coalesce(max(value), 1) FROM default.src", Seq(Row(1)))
   }
 
   test("in subquery") {
     checkAnswer(
-      "bob",
+      bob,
       "SELECT value FROM default.src WHERE value in (SELECT value as key FROM 
default.src)",
       Seq(Row(1)))
   }
 
   test("ctas") {
     withCleanTmpResources(Seq(("default.src2", "table"))) {
-      doAs("bob", sql(s"CREATE TABLE default.src2 $format AS SELECT value FROM default.src"))
+      doAs(bob, sql(s"CREATE TABLE default.src2 $format AS SELECT value FROM default.src"))
       val query = "select value from default.src2"
-      checkAnswer("admin", query, Seq(Row(1)))
-      checkAnswer("bob", query, Seq(Row(1)))
+      checkAnswer(admin, query, Seq(Row(1)))
+      checkAnswer(bob, query, Seq(Row(1)))
     }
   }
 
   test("[KYUUBI #3581]: row level filter on permanent view") {
     assume(isSparkV31OrGreater)
     val supported = doAs(
-      "perm_view_user",
+      permViewUser,
      Try(sql("CREATE OR REPLACE VIEW default.perm_view AS SELECT * FROM default.src")).isSuccess)
    assume(supported, s"view support for '$format' has not been implemented yet")
 
     withCleanTmpResources(Seq((s"default.perm_view", "view"))) {
       checkAnswer(
-        "admin",
+        admin,
         "SELECT key FROM default.perm_view order order by key",
         Seq(Row(1), Row(20), Row(30)))
-      checkAnswer("bob", "SELECT key FROM default.perm_view", Seq(Row(1)))
-      checkAnswer("bob", "SELECT value FROM default.perm_view", Seq(Row(1)))
-      checkAnswer("bob", "SELECT max(value) FROM default.perm_view", Seq(Row(1)))
-      checkAnswer("bob", "SELECT coalesce(max(value), 1) FROM default.perm_view", Seq(Row(1)))
+      checkAnswer(bob, "SELECT key FROM default.perm_view", Seq(Row(1)))
+      checkAnswer(bob, "SELECT value FROM default.perm_view", Seq(Row(1)))
+      checkAnswer(bob, "SELECT max(value) FROM default.perm_view", Seq(Row(1)))
+      checkAnswer(bob, "SELECT coalesce(max(value), 1) FROM default.perm_view", Seq(Row(1)))
       checkAnswer(
-        "bob",
+        bob,
         "SELECT value FROM default.perm_view WHERE value in " +
           "(SELECT value as key FROM default.perm_view)",
         Seq(Row(1)))


Reply via email to