Github user manishgupta88 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2140#discussion_r179661112
--- Diff:
integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
---
@@ -493,18 +493,15 @@ object CarbonDataRDDFactory {
}
// as no record loaded in new segment, new segment should be deleted
val newEntryLoadStatus =
- if
(!carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable.isChildDataMap &&
+ if
(!carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable.isChildDataMap &&
!CarbonLoaderUtil.isValidSegment(carbonLoadModel,
carbonLoadModel.getSegmentId.toInt)) {
- LOGGER.audit(s"Data load is failed for " +
- s"${ carbonLoadModel.getDatabaseName }.${
carbonLoadModel.getTableName }" +
- " as there is no data to load")
- LOGGER.warn("Cannot write load metadata file as data load failed")
+ LOGGER.warn("Cannot write load metadata file as there is no data
to load")
- SegmentStatus.MARKED_FOR_DELETE
- } else {
- loadStatus
- }
+ SegmentStatus.MARKED_FOR_DELETE
+ } else {
+ loadStatus
+ }
--- End diff --
Please correct the formatting of the code.
---