maropu commented on a change in pull request #24715: [SPARK-25474][SQL] Data
source tables support fallback to HDFS for size estimation
URL: https://github.com/apache/spark/pull/24715#discussion_r314587352
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/command/CommandUtils.scala
##########
@@ -345,14 +345,16 @@ object CommandUtils extends Logging {
!path.getName.startsWith(stagingDir) && DataSourceUtils.isDataPath(path)
}
- def getSizeInBytesFallBackToHdfs(session: SparkSession, path: Path,
defaultSize: Long): Long = {
+ def getSizeInBytesFallBackToHdfs(session: SparkSession, table:
CatalogTable): Long = {
try {
val hadoopConf = session.sessionState.newHadoopConf()
- path.getFileSystem(hadoopConf).getContentSummary(path).getLength
+ val tablePath = new Path(table.location)
+ val fs: FileSystem = tablePath.getFileSystem(hadoopConf)
+ fs.getContentSummary(tablePath).getLength
} catch {
case NonFatal(e) =>
- logWarning(s"Failed to get table size from hdfs. Using the default
size, $defaultSize.", e)
- defaultSize
+ logWarning(s"Failed to get table size from HDFS. Using the default
data size.", e)
Review comment:
By the way, do we no longer need to print `defaultSize` in the warning? If we still want it, we could do:
```
val defaultSize = session.sessionState.conf.defaultSizeInBytes
logWarning(s"Failed to get table size from hdfs. Using the default
size, $defaultSize.", e)
defaultSize
```
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]