Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16674#discussion_r100595967
  
    --- Diff: 
sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala ---
    @@ -452,311 +542,96 @@ class SQLViewSuite extends QueryTest with 
SQLTestUtils with TestHiveSingleton {
         }
       }
     
    -  test("create hive view for joined tables") {
    -    // make sure the new flag can handle some complex cases like join and 
schema change.
    -    withTable("jt1", "jt2") {
    -      spark.range(1, 
10).toDF("id1").write.format("json").saveAsTable("jt1")
    -      spark.range(1, 
10).toDF("id2").write.format("json").saveAsTable("jt2")
    -      sql("CREATE VIEW testView AS SELECT * FROM jt1 JOIN jt2 ON id1 == 
id2")
    -      checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 
9).map(i => Row(i, i)))
    -
    -      val df = (1 until 10).map(i => i -> i).toDF("id1", "newCol")
    -      df.write.format("json").mode(SaveMode.Overwrite).saveAsTable("jt1")
    -      checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 
9).map(i => Row(i, i)))
    -
    -      sql("DROP VIEW testView")
    -    }
    -  }
    -
    -  test("SPARK-14933 - create view from hive parquet table") {
    -    withTable("t_part") {
    -      withView("v_part") {
    -        spark.sql("create table t_part stored as parquet as select 1 as a, 
2 as b")
    -        spark.sql("create view v_part as select * from t_part")
    -        checkAnswer(
    -          sql("select * from t_part"),
    -          sql("select * from v_part"))
    -      }
    -    }
    -  }
    -
    -  test("SPARK-14933 - create view from hive orc table") {
    -    withTable("t_orc") {
    -      withView("v_orc") {
    -        spark.sql("create table t_orc stored as orc as select 1 as a, 2 as 
b")
    -        spark.sql("create view v_orc as select * from t_orc")
    -        checkAnswer(
    -          sql("select * from t_orc"),
    -          sql("select * from v_orc"))
    -      }
    -    }
    -  }
    -
    -  test("create a permanent/temp view using a hive, built-in, and permanent 
user function") {
    -    val permanentFuncName = "myUpper"
    -    val permanentFuncClass =
    -      
classOf[org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper].getCanonicalName
    -    val builtInFuncNameInLowerCase = "abs"
    -    val builtInFuncNameInMixedCase = "aBs"
    -    val hiveFuncName = "histogram_numeric"
    -
    -    withUserDefinedFunction(permanentFuncName -> false) {
    -      sql(s"CREATE FUNCTION $permanentFuncName AS '$permanentFuncClass'")
    -      withTable("tab1") {
    -        (1 to 10).map(i => (s"$i", i)).toDF("str", 
"id").write.saveAsTable("tab1")
    -        Seq("VIEW", "TEMPORARY VIEW").foreach { viewMode =>
    -          withView("view1") {
    -            sql(
    -              s"""
    -                 |CREATE $viewMode view1
    -                 |AS SELECT
    -                 |$permanentFuncName(str),
    -                 |$builtInFuncNameInLowerCase(id),
    -                 |$builtInFuncNameInMixedCase(id) as aBs,
    -                 |$hiveFuncName(id, 5) over()
    -                 |FROM tab1
    -               """.stripMargin)
    -            checkAnswer(sql("select count(*) FROM view1"), Row(10))
    -          }
    -        }
    -      }
    -    }
    -  }
    -
    -  test("create a permanent/temp view using a temporary function") {
    -    val tempFunctionName = "temp"
    -    val functionClass =
    -      
classOf[org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper].getCanonicalName
    -    withUserDefinedFunction(tempFunctionName -> true) {
    -      sql(s"CREATE TEMPORARY FUNCTION $tempFunctionName AS 
'$functionClass'")
    -      withView("view1", "tempView1") {
    -        withTable("tab1") {
    -          (1 to 10).map(i => s"$i").toDF("id").write.saveAsTable("tab1")
    -
    -          // temporary view
    -          sql(s"CREATE TEMPORARY VIEW tempView1 AS SELECT 
$tempFunctionName(id) from tab1")
    -          checkAnswer(sql("select count(*) FROM tempView1"), Row(10))
    -
    -          // permanent view
    -          val e = intercept[AnalysisException] {
    -            sql(s"CREATE VIEW view1 AS SELECT $tempFunctionName(id) from 
tab1")
    -          }.getMessage
    -          assert(e.contains("Not allowed to create a permanent view 
`view1` by referencing " +
    -            s"a temporary function `$tempFunctionName`"))
    -        }
    -      }
    -    }
    -  }
    -
    -  test("correctly resolve a nested view") {
    -    withTempDatabase { db =>
    -      withView(s"$db.view1", s"$db.view2") {
    -        val view1 = CatalogTable(
    -          identifier = TableIdentifier("view1", Some(db)),
    -          tableType = CatalogTableType.VIEW,
    -          storage = CatalogStorageFormat.empty,
    -          schema = new StructType().add("x", "long").add("y", "long"),
    -          viewText = Some("SELECT * FROM jt"),
    -          properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> "default",
    -            CatalogTable.VIEW_QUERY_OUTPUT_NUM_COLUMNS -> "2",
    -            s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}0" -> 
"id",
    -            s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}1" -> 
"id1"))
    -        val view2 = CatalogTable(
    -          identifier = TableIdentifier("view2", Some(db)),
    -          tableType = CatalogTableType.VIEW,
    -          storage = CatalogStorageFormat.empty,
    -          schema = new StructType().add("id", "long").add("id1", "long"),
    -          viewText = Some("SELECT * FROM view1"),
    -          properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> db,
    -            CatalogTable.VIEW_QUERY_OUTPUT_NUM_COLUMNS -> "2",
    -            s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}0" -> 
"x",
    -            s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}1" -> 
"y"))
    -        activateDatabase(db) {
    -          hiveContext.sessionState.catalog.createTable(view1, 
ignoreIfExists = false)
    -          hiveContext.sessionState.catalog.createTable(view2, 
ignoreIfExists = false)
    -          checkAnswer(sql("SELECT * FROM view2 ORDER BY id"), (1 to 
9).map(i => Row(i, i)))
    -        }
    -      }
    -    }
    -  }
    -
    -  test("correctly resolve a view with CTE") {
    -    withView("cte_view") {
    -      val cte_view = CatalogTable(
    -        identifier = TableIdentifier("cte_view"),
    -        tableType = CatalogTableType.VIEW,
    -        storage = CatalogStorageFormat.empty,
    -        schema = new StructType().add("n", "int"),
    -        viewText = Some("WITH w AS (SELECT 1 AS n) SELECT n FROM w"),
    -        properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> "default",
    -          CatalogTable.VIEW_QUERY_OUTPUT_NUM_COLUMNS -> "1",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}0" -> "n"))
    -      hiveContext.sessionState.catalog.createTable(cte_view, 
ignoreIfExists = false)
    -      checkAnswer(sql("SELECT * FROM cte_view"), Row(1))
    -    }
    -  }
    -
    -  test("correctly resolve a view in a self join") {
    -    withView("join_view") {
    -      val join_view = CatalogTable(
    -        identifier = TableIdentifier("join_view"),
    -        tableType = CatalogTableType.VIEW,
    -        storage = CatalogStorageFormat.empty,
    -        schema = new StructType().add("id", "long").add("id1", "long"),
    -        viewText = Some("SELECT * FROM jt"),
    -        properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> "default",
    -          CatalogTable.VIEW_QUERY_OUTPUT_NUM_COLUMNS -> "2",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}0" -> "id",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}1" -> 
"id1"))
    -      hiveContext.sessionState.catalog.createTable(join_view, 
ignoreIfExists = false)
    -      checkAnswer(
    -        sql("SELECT * FROM join_view t1 JOIN join_view t2 ON t1.id = t2.id 
ORDER BY t1.id"),
    -        (1 to 9).map(i => Row(i, i, i, i)))
    -    }
    -  }
    -
    -  private def assertInvalidReference(query: String): Unit = {
    -    val e = intercept[AnalysisException] {
    -      sql(query)
    -    }.getMessage
    -    assert(e.contains("Table or view not found"))
    -  }
    -
       test("error handling: fail if the referenced table or view is invalid") {
         withView("view1", "view2", "view3") {
          // Fail if the referenced table is defined in an invalid database.
    -      val view1 = CatalogTable(
    -        identifier = TableIdentifier("view1"),
    -        tableType = CatalogTableType.VIEW,
    -        storage = CatalogStorageFormat.empty,
    -        schema = new StructType().add("id", "long").add("id1", "long"),
    -        viewText = Some("SELECT * FROM invalid_db.jt"),
    -        properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> "default",
    -          CatalogTable.VIEW_QUERY_OUTPUT_NUM_COLUMNS -> "2",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}0" -> "id",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}1" -> 
"id1"))
    -      hiveContext.sessionState.catalog.createTable(view1, ignoreIfExists = 
false)
    +      withTempDatabase { db =>
    +        withTable(s"$db.table1") {
    +          activateDatabase(db) {
    +            sql("CREATE TABLE table1(a int, b string) USING parquet")
    +            sql("CREATE VIEW default.view1 AS SELECT * FROM table1")
    +          }
    +        }
    +      }
           assertInvalidReference("SELECT * FROM view1")
     
           // Fail if the referenced table is invalid.
    -      val view2 = CatalogTable(
    -        identifier = TableIdentifier("view2"),
    -        tableType = CatalogTableType.VIEW,
    -        storage = CatalogStorageFormat.empty,
    -        schema = new StructType().add("id", "long").add("id1", "long"),
    -        viewText = Some("SELECT * FROM invalid_table"),
    -        properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> "default",
    -          CatalogTable.VIEW_QUERY_OUTPUT_NUM_COLUMNS -> "2",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}0" -> "id",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}1" -> 
"id1"))
    -      hiveContext.sessionState.catalog.createTable(view2, ignoreIfExists = 
false)
    +      withTable("table2") {
    +        sql("CREATE TABLE table2(a int, b string) USING parquet")
    +        sql("CREATE VIEW view2 AS SELECT * FROM table2")
    +      }
           assertInvalidReference("SELECT * FROM view2")
     
           // Fail if the referenced view is invalid.
    -      val view3 = CatalogTable(
    -        identifier = TableIdentifier("view3"),
    -        tableType = CatalogTableType.VIEW,
    -        storage = CatalogStorageFormat.empty,
    -        schema = new StructType().add("id", "long").add("id1", "long"),
    -        viewText = Some("SELECT * FROM view2"),
    -        properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> "default",
    -          CatalogTable.VIEW_QUERY_OUTPUT_NUM_COLUMNS -> "2",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}0" -> "id",
    -          s"${CatalogTable.VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX}1" -> 
"id1"))
    -      hiveContext.sessionState.catalog.createTable(view3, ignoreIfExists = 
false)
    +      withView("testView") {
    +        sql("CREATE VIEW testView AS SELECT * FROM jt")
    +        sql("CREATE VIEW view3 AS SELECT * FROM testView")
    +      }
           assertInvalidReference("SELECT * FROM view3")
         }
       }
     
    -  test("make sure we can resolve view created by old version of Spark") {
    -    withTable("hive_table") {
    -      withView("old_view") {
    -        spark.sql("CREATE TABLE hive_table AS SELECT 1 AS a, 2 AS b")
    -        // The views defined by older versions of Spark(before 2.2) will 
have empty view default
    -        // database name, and all the relations referenced in the viewText 
will have database part
    -        // defined.
    -        val view = CatalogTable(
    -          identifier = TableIdentifier("old_view"),
    -          tableType = CatalogTableType.VIEW,
    -          storage = CatalogStorageFormat.empty,
    -          schema = new StructType().add("a", "int").add("b", "int"),
    -          viewText = Some("SELECT `gen_attr_0` AS `a`, `gen_attr_1` AS `b` 
FROM (SELECT " +
    -            "`gen_attr_0`, `gen_attr_1` FROM (SELECT `a` AS `gen_attr_0`, 
`b` AS " +
    -            "`gen_attr_1` FROM hive_table) AS gen_subquery_0) AS 
hive_table")
    -        )
    -        hiveContext.sessionState.catalog.createTable(view, ignoreIfExists 
= false)
    -        val df = sql("SELECT * FROM old_view")
    -        // Check the output rows.
    -        checkAnswer(df, Row(1, 2))
    -        // Check the output schema.
    -        assert(df.schema.sameType(view.schema))
    -      }
    +  test("correctly resolve a view in a self join") {
    +    withView("testView") {
    +      sql("CREATE VIEW testView AS SELECT * FROM jt")
    +      checkAnswer(
    +        sql("SELECT * FROM testView t1 JOIN testView t2 ON t1.id = t2.id 
ORDER BY t1.id"),
    +        (1 to 9).map(i => Row(i, i, i, i)))
         }
       }
     
       test("resolve a view with custom column names") {
    --- End diff --
    
    is this test duplicated with 
https://github.com/apache/spark/pull/16674/files#diff-e09027394aebb11c880b375693f59fafR273
 ?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to