This is an automated email from the ASF dual-hosted git repository.

akashrn5 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new b61e31d  [CARBONDATA-3822] Fixed load time taken and added format back
b61e31d is described below

commit b61e31d446a88aee05fb9a04f2933cb1f4cd8f3c
Author: kunal642 <[email protected]>
AuthorDate: Wed May 13 10:19:02 2020 +0530

    [CARBONDATA-3822] Fixed load time taken and added format back
    
    Why is this PR needed?
    1. Load time taken is shown as "PT-1.0S", which is wrong (a negative duration).
    2. SHOW SEGMENTS output is missing the file format information.
    
    What changes were proposed in this PR?
    1. Fixed the duration calculation and stripped the "PT" prefix so the time is shown as "1.0S" for better readability (a short sketch of the change follows below).
    2. Added the file format column back to the SHOW SEGMENTS output.
    
    This closes #3765
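
A minimal sketch of the ordering issue, using hypothetical epoch-millisecond values (not taken from the commit): java.time.Duration.between(start, end) is positive only when start precedes end, and its ISO-8601 toString() carries a "PT" prefix, which the fix strips.

import java.time.{Duration, Instant}

// Hypothetical load start/end times in epoch milliseconds.
val loadStartTime = 1000L
val loadEndTime   = 2000L

// Old argument order (end, start) yields a negative duration, e.g. "PT-1S".
val before = Duration.between(
  Instant.ofEpochMilli(loadEndTime),
  Instant.ofEpochMilli(loadStartTime)).toString

// Fixed order (start, end) plus stripping the ISO-8601 "PT" prefix, e.g. "1S".
val after = Duration.between(
  Instant.ofEpochMilli(loadStartTime),
  Instant.ofEpochMilli(loadEndTime)).toString.replace("PT", "")
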
---
 .../src/main/scala/org/apache/carbondata/api/CarbonStore.scala |  6 +++---
 .../command/management/CarbonShowSegmentsCommand.scala         |  6 ++++--
 .../spark/testsuite/addsegment/AddSegmentTestCase.scala        |  2 +-
 .../spark/testsuite/segment/ShowSegmentTestCase.scala          | 10 ++++++++++
 4 files changed, 18 insertions(+), 6 deletions(-)

diff --git a/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala b/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
index f516431..d86f7be 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
@@ -115,9 +115,9 @@ object CarbonStore {
       "NA"
     } else {
       Duration.between(
-        Instant.ofEpochMilli(load.getLoadEndTime),
-        Instant.ofEpochMilli(load.getLoadStartTime)
-      ).toString
+        Instant.ofEpochMilli(load.getLoadStartTime),
+        Instant.ofEpochMilli(load.getLoadEndTime)
+      ).toString.replace("PT", "")
     }
   }
 
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowSegmentsCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowSegmentsCommand.scala
index c3157ca..53ea021 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowSegmentsCommand.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowSegmentsCommand.scala
@@ -42,7 +42,8 @@ case class CarbonShowSegmentsCommand(
       AttributeReference("Load Time Taken", StringType, nullable = true)(),
       AttributeReference("Partition", StringType, nullable = true)(),
       AttributeReference("Data Size", StringType, nullable = false)(),
-      AttributeReference("Index Size", StringType, nullable = false)())
+      AttributeReference("Index Size", StringType, nullable = false)(),
+      AttributeReference("File Format", StringType, nullable = false)())
   }
 
   override def processData(sparkSession: SparkSession): Seq[Row] = {
@@ -91,7 +92,8 @@ case class CarbonShowSegmentsCommand(
           timeTaken,
           partitionString,
           Strings.formatSize(dataSize.toFloat),
-          Strings.formatSize(indexSize.toFloat))
+          Strings.formatSize(indexSize.toFloat),
+          segment.getFileFormat.toString)
       }.toSeq
   }
 }
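
A hedged usage sketch (not part of the commit; the table name is hypothetical and sql refers to a Spark session handle as in the test suites below): with the "File Format" attribute appended after "Index Size", callers read it from column index 7, which is exactly what the updated tests check.

// Column indices follow the AttributeReference order in CarbonShowSegmentsCommand.
val rows = sql("show segments for table my_table").collect()
rows.foreach { row =>
  // index 3 -> "Load Time Taken", e.g. "1.0S" once the "PT" prefix is stripped
  // index 7 -> "File Format",     e.g. "columnar_v3" for native carbondata segments
  println(s"time taken = ${row.getString(3)}, format = ${row.getString(7)}")
}
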
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
index 0fd32c5..7b7af40 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
@@ -228,10 +228,10 @@ class AddSegmentTestCase extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select empname from addsegment1 where empname='arvind'"), Seq(Row("arvind"),Row("arvind")))
     checkAnswer(sql("select count(empname) from addsegment1"), Seq(Row(20)))
     checkAnswer(sql("select count(*) from addsegment1"), Seq(Row(20)))
-    sql("show segments for table addsegment1").show(100, false)
     val showSeg = sql("show segments for table addsegment1").collectAsList()
     val descFormattedSize = sql("desc formatted addsegment1").collect().filter(_.get(0).toString.startsWith("Table Data Size")).head.get(1).toString
     val size = getDataSize(newPath)
+    assert(showSeg.get(0).getString(7).equalsIgnoreCase("parquet"))
     assert(descFormattedSize.split("KB")(0).toDouble > 0.0d)
     assert(showSeg.get(0).get(5).toString.equalsIgnoreCase(size))
     assert(showSeg.get(0).get(6).toString.equalsIgnoreCase("NA"))
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/segment/ShowSegmentTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/segment/ShowSegmentTestCase.scala
index a6d481f..0fbd39c 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/segment/ShowSegmentTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/segment/ShowSegmentTestCase.scala
@@ -187,6 +187,16 @@ class ShowSegmentTestCase extends QueryTest with BeforeAndAfterAll {
     dropTable(tableName)
   }
 
+  test("test for load time and format name") {
+    sql("drop table if exists a")
+    sql("create table a(a string) stored as carbondata")
+    sql("insert into a select 'k'")
+    val rows = sql("show segments for table a").collect()
+    assert(rows(0).getString(3).replace("S", "").toDouble > 0)
+    assert(rows(0).getString(7).equalsIgnoreCase("columnar_v3"))
+    sql("drop table if exists a")
+  }
+
   private def insertTestDataIntoTable(tableName: String) = {
     sql(s"insert into ${ tableName } select 'abc1',1")
     sql(s"insert into ${ tableName } select 'abc2',2")
