Github user steveloughran commented on a diff in the pull request:
https://github.com/apache/spark/pull/19448#discussion_r143992362
--- Diff: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCommitterSuite.scala ---
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.parquet
+
+import java.io.FileNotFoundException
+
+import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.{FileStatus, Path}
+import org.apache.hadoop.mapreduce.{JobContext, TaskAttemptContext}
+import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
+import org.apache.parquet.hadoop.{ParquetOutputCommitter, ParquetOutputFormat}
+
+import org.apache.spark.{LocalSparkContext, SparkFunSuite}
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.test.SQLTestUtils
+
+/**
+ * Test logic related to the choice of output committers.
+ */
+class ParquetCommitterSuite extends SparkFunSuite with SQLTestUtils
+ with LocalSparkContext {
+
+  private val PARQUET_COMMITTER = classOf[ParquetOutputCommitter].getCanonicalName
+
+ protected var spark: SparkSession = _
+
+  /**
+   * Create a new [[SparkSession]] running in local-cluster mode.
+   */
+ override def beforeAll(): Unit = {
+ super.beforeAll()
+ spark = SparkSession.builder()
+ .master("local-cluster[2,1,1024]")
+ .appName("testing")
+ .getOrCreate()
+ }
+
+  override def afterAll(): Unit = {
+    try {
+      if (spark != null) spark.stop()
+      spark = null
+    } finally super.afterAll()
+  }
+
+ test("alternative output committer, merge schema") {
+ intercept[RuntimeException] {
+ val stat = writeDataFrame(MarkingFileOutput.COMMITTER, true, true)
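+      // Reached only if the write unexpectedly succeeds; log the marker file
+      // status for diagnosis before the surrounding intercept fails the test.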
+ logError(s"Created marker file $stat")
+ }
+ }
+
+ test("alternative output committer, no merge schema") {
+ writeDataFrame(MarkingFileOutput.COMMITTER, false, true)
--- End diff --
OK
---
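For context, a minimal sketch of the mechanism this suite exercises: Spark SQL picks the Parquet committer from the `spark.sql.parquet.output.committer.class` option, and schema merging is toggled by `spark.sql.parquet.mergeSchema`. The object name, app name, and output path below are illustrative, not from the diff; the config keys are Spark's public options.

import org.apache.parquet.hadoop.ParquetOutputCommitter
import org.apache.spark.sql.SparkSession

object CommitterWiringSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("committer-wiring-sketch")
      // Committer class consulted by Spark SQL's Parquet write path;
      // the suite above swaps in alternatives such as MarkingFileOutput.
      .config("spark.sql.parquet.output.committer.class",
        classOf[ParquetOutputCommitter].getCanonicalName)
      // Schema merging is the other dimension the truncated tests vary.
      .config("spark.sql.parquet.mergeSchema", "true")
      .getOrCreate()
    try {
      spark.range(10).write.mode("overwrite").parquet("/tmp/committer-sketch")
      spark.read.parquet("/tmp/committer-sketch").show()
    } finally {
      spark.stop()
    }
  }
}

The interaction the tests probe is that some committer features (e.g. summary metadata) only exist on ParquetOutputCommitter subclasses, so combining an arbitrary committer with merge-schema settings can be expected to fail, hence the intercept[RuntimeException] in the first test.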
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]