kunal642 commented on a change in pull request #3583: [CARBONDATA-3687] Support 
writing non-transactional carbondata files through hive
URL: https://github.com/apache/carbondata/pull/3583#discussion_r388167385
 
 

 ##########
 File path: 
integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonOutputFormat.java
 ##########
 @@ -18,43 +18,115 @@
 package org.apache.carbondata.hive;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map;
 import java.util.Properties;
 
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.metadata.schema.PartitionInfo;
+import org.apache.carbondata.core.util.ObjectSerializationUtil;
+import org.apache.carbondata.core.util.ThreadLocalSessionInfo;
 import org.apache.carbondata.hadoop.api.CarbonTableOutputFormat;
+import org.apache.carbondata.hadoop.internal.ObjectArrayWritable;
+import org.apache.carbondata.hive.util.HiveCarbonUtil;
+import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapred.RecordWriter;
-import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.hadoop.util.Progressable;
 
-/**
- * TODO : To extend CarbonOutputFormat
- */
 public class MapredCarbonOutputFormat<T> extends CarbonTableOutputFormat
-    implements HiveOutputFormat<Void, T> {
+    implements HiveOutputFormat<Void, T>, OutputFormat<Void, T> {
 
   @Override
   public RecordWriter<Void, T> getRecordWriter(FileSystem fileSystem, JobConf 
jobConf, String s,
-      Progressable progressable) {
-    return null;
+      Progressable progressable) throws IOException {
+    throw new RuntimeException("Should never be used");
   }
 
   @Override
-  public void checkOutputSpecs(FileSystem fileSystem, JobConf jobConf)
-      throws IOException {
-    org.apache.hadoop.mapreduce.JobContext jobContext = 
Job.getInstance(jobConf);
-    super.checkOutputSpecs(jobContext);
+  public void checkOutputSpecs(FileSystem fileSystem, JobConf jobConf) throws 
IOException {
   }
 
   @Override
   public FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jc, Path 
finalOutPath,
       Class<? extends Writable> valueClass, boolean isCompressed, Properties 
tableProperties,
-      Progressable progress) {
-    return null;
+      Progressable progress) throws IOException {
+    CarbonLoadModel carbonLoadModel = null;
+    String encodedString = jc.get(LOAD_MODEL);
+    if (encodedString != null) {
+      carbonLoadModel =
+          (CarbonLoadModel) 
ObjectSerializationUtil.convertStringToObject(encodedString);
+    }
+    if (carbonLoadModel == null) {
+      carbonLoadModel = HiveCarbonUtil.getCarbonLoadModel(tableProperties, jc);
 
 Review comment:
   done

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Service

Reply via email to