This is an automated email from the ASF dual-hosted git repository.
diwu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris-spark-connector.git
The following commit(s) were added to refs/heads/master by this push:
new 0b7e861 remove unused classes of DorisWriterOption and DorisWriterOptionKeys (#87)
0b7e861 is described below
commit 0b7e861bd53b0ad3cb0a5ca44daadab69700b07f
Author: Bowen Liang <[email protected]>
AuthorDate: Fri Mar 31 18:10:23 2023 +0800
remove unused classes of DorisWriterOption and DorisWriterOptionKeys (#87)
---
.../doris/spark/sql/DorisStreamLoadSink.scala | 3 +-
.../apache/doris/spark/sql/DorisWriterOption.scala | 41 ----------------------
.../doris/spark/sql/DorisWriterOptionKeys.scala | 28 ---------------
3 files changed, 1 insertion(+), 71 deletions(-)
diff --git a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisStreamLoadSink.scala b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisStreamLoadSink.scala
index 4796e4b..e6c9960 100644
--- a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisStreamLoadSink.scala
+++ b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisStreamLoadSink.scala
@@ -18,7 +18,6 @@
package org.apache.doris.spark.sql
import org.apache.doris.spark.cfg.{ConfigurationOptions, SparkSettings}
-import org.apache.doris.spark.sql.DorisWriterOptionKeys.maxRowCount
import org.apache.doris.spark.{CachedDorisStreamLoadClient, DorisStreamLoad}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.execution.streaming.Sink
@@ -90,7 +89,7 @@ private[sql] class DorisStreamLoadSink(sqlContext: SQLContext, settings: SparkSe
throw new IOException("unable to flush; interrupted while
doing another attempt", ex)
}
}
- throw new IOException(s"Failed to load $maxRowCount batch data on
BE: ${dorisStreamLoader.getLoadUrlStr} node and exceeded the max
${maxRetryTimes} retry times.", err)
+ throw new IOException(s"Failed to load batch data on BE:
${dorisStreamLoader.getLoadUrlStr} node and exceeded the max ${maxRetryTimes}
retry times.", err)
}
}
}
diff --git a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisWriterOption.scala b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisWriterOption.scala
deleted file mode 100644
index 69238c7..0000000
--- a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisWriterOption.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-package org.apache.doris.spark.sql
-
-import org.apache.doris.spark.exception.DorisException
-
-class DorisWriterOption(val feHostPort: String ,val dbName: String,val tbName: String,
- val user: String ,val password: String,
- val maxRowCount: Long,val maxRetryTimes:Int)
-
-object DorisWriterOption{
- def apply(parameters: Map[String, String]): DorisWriterOption={
- val feHostPort: String = parameters.getOrElse(DorisWriterOptionKeys.feHostPort, throw new DorisException("feHostPort is empty"))
-
- val dbName: String = parameters.getOrElse(DorisWriterOptionKeys.dbName, throw new DorisException("dbName is empty"))
-
- val tbName: String = parameters.getOrElse(DorisWriterOptionKeys.tbName, throw new DorisException("tbName is empty"))
-
- val user: String = parameters.getOrElse(DorisWriterOptionKeys.user, throw new DorisException("user is empty"))
-
- val password: String = parameters.getOrElse(DorisWriterOptionKeys.password, throw new DorisException("password is empty"))
-
- val maxRowCount: Long = parameters.getOrElse(DorisWriterOptionKeys.maxRowCount, "1024").toLong
- val maxRetryTimes: Int = parameters.getOrElse(DorisWriterOptionKeys.maxRetryTimes, "3").toInt
- new DorisWriterOption(feHostPort, dbName, tbName, user, password, maxRowCount, maxRetryTimes)
- }
-}
\ No newline at end of file
diff --git a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisWriterOptionKeys.scala b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisWriterOptionKeys.scala
deleted file mode 100644
index 9cadd9f..0000000
--- a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/DorisWriterOptionKeys.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-package org.apache.doris.spark.sql
-
-object DorisWriterOptionKeys {
- val feHostPort="feHostPort"
- val dbName="dbName"
- val tbName="tbName"
- val user="user"
- val password="password"
- val maxRowCount="maxRowCount"
- val maxRetryTimes="maxRetryTimes"
-
-}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]