Repository: sqoop
Updated Branches:
  refs/heads/trunk 52604b166 -> e1c6e4a73


SQOOP-1627: Fix Hadoop100 and Hadoop20 profiles

(Venkat Ranganathan via Abraham Elmahrek)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/e1c6e4a7
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/e1c6e4a7
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/e1c6e4a7

Branch: refs/heads/trunk
Commit: e1c6e4a7317cc5893d1e930663c92a05ec424603
Parents: 52604b1
Author: Abraham Elmahrek <[email protected]>
Authored: Wed Nov 26 14:40:44 2014 -0800
Committer: Abraham Elmahrek <[email protected]>
Committed: Wed Nov 26 14:41:22 2014 -0800

----------------------------------------------------------------------
 build.xml                                       |  8 +--
 .../apache/sqoop/mapreduce/HBaseImportJob.java  | 21 ++++++-
 .../TestMainframeDatasetFTPRecordReader.java    |  2 +-
 .../TestMainframeDatasetInputFormat.java        |  2 +-
 .../SqlServerUpsertOutputFormatTest.java        | 60 +++++++++++++++-----
 5 files changed, 71 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/e1c6e4a7/build.xml
----------------------------------------------------------------------
diff --git a/build.xml b/build.xml
index 21fd406..1be4e46 100644
--- a/build.xml
+++ b/build.xml
@@ -125,9 +125,9 @@
   <if>
     <equals arg1="${hadoopversion}" arg2="20" />
     <then>
-      <property name="hadoop.version" value="0.20.2-cdh3u1" />
-      <property name="hbase94.version" value="0.90.3-cdh3u1" />
-      <property name="zookeeper.version" value="3.3.3-cdh3u1" />
+      <property name="hadoop.version" value="0.20.2-cdh3u5" />
+      <property name="hbase94.version" value="0.90.6-cdh3u5" />
+      <property name="zookeeper.version" value="3.3.3-cdh3u5" />
       <property name="hadoop.version.full" value="0.20" />
       <property name="hcatalog.version" value="0.13.0" />
       <property name="hbasecompatprofile" value="1" />
@@ -150,7 +150,7 @@
     <elseif>
       <equals arg1="${hadoopversion}" arg2="100" />
       <then>
-        <property name="hadoop.version" value="1.0.0" />
+        <property name="hadoop.version" value="1.0.4" />
         <property name="hbase94.version" value="0.92.0" />
         <property name="zookeeper.version" value="3.4.2" />
         <property name="hadoop.version.full" value="1.0.0" />

http://git-wip-us.apache.org/repos/asf/sqoop/blob/e1c6e4a7/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java 
b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
index a93114f..ac8758b 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
@@ -155,10 +155,25 @@ public class HBaseImportJob extends DataDrivenImportJob {
       throw new ImportException(
           "Import to HBase error: Column family not specified");
     }
+    Method m = null;
+    try {
+      m = HBaseConfiguration.class.getMethod("merge",
+      Configuration.class, Configuration.class);
+    } catch (NoSuchMethodException nsme) {
+    }
 
-    // Add HBase configuration files to this conf object.
-    Configuration newConf = HBaseConfiguration.create(conf);
-    HBaseConfiguration.merge(conf, newConf);
+    if (m != null) {
+      // Add HBase configuration files to this conf object.
+
+      Configuration newConf = HBaseConfiguration.create(conf);
+      try {
+        m.invoke(null, conf, newConf);
+      } catch (Exception e) {
+        throw new ImportException(e);
+      }
+    } else {
+      HBaseConfiguration.addHbaseResources(conf);
+    }
 
     HBaseAdmin admin = new HBaseAdmin(conf);
 

http://git-wip-us.apache.org/repos/asf/sqoop/blob/e1c6e4a7/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
----------------------------------------------------------------------
diff --git 
a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
 
b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
index 613ee7a..0614154 100644
--- 
a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
+++ 
b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
@@ -201,7 +201,7 @@ public class TestMainframeDatasetFTPRecordReader {
     conf.setClass(DBConfiguration.INPUT_CLASS_PROPERTY, DummySqoopRecord.class,
         DBWritable.class);
 
-    Job job = Job.getInstance(conf);
+    Job job = new Job(conf);
     mfDIS = new MainframeDatasetInputSplit();
     mfDIS.addDataset("test1");
     mfDIS.addDataset("test2");

http://git-wip-us.apache.org/repos/asf/sqoop/blob/e1c6e4a7/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
----------------------------------------------------------------------
diff --git 
a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
 
b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
index 70958e0..e386fb0 100644
--- 
a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
+++ 
b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
@@ -96,7 +96,7 @@ public class TestMainframeDatasetInputFormat {
 
     String dsName = "dsName1";
     conf.set(MainframeConfiguration.MAINFRAME_INPUT_DATASET_NAME, dsName);
-    Job job = Job.getInstance(conf);
+    Job job = new Job(conf);
     format.getSplits(job);
 
     List<InputSplit> splits = new ArrayList<InputSplit>();

http://git-wip-us.apache.org/repos/asf/sqoop/blob/e1c6e4a7/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
----------------------------------------------------------------------
diff --git 
a/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
 
b/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
index b8c4538..924c116 100644
--- 
a/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
+++ 
b/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
@@ -1,13 +1,15 @@
 package org.apache.sqoop.mapreduce.sqlserver;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.lang.reflect.Constructor;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.lib.db.DBConfiguration;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.sqoop.manager.SQLServerManager;
 import org.apache.sqoop.mapreduce.ExportJobBase;
 import 
org.apache.sqoop.mapreduce.sqlserver.SqlServerUpsertOutputFormat.SqlServerUpsertRecordWriter;
@@ -19,7 +21,8 @@ public class SqlServerUpsertOutputFormatTest {
   @Test
   public void Merge_statement_is_parameterized_correctly() throws Exception {
     Configuration conf = new Configuration();
-    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, 
org.hsqldb.jdbcDriver.class.getName());
+    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
+      org.hsqldb.jdbcDriver.class.getName());
     conf.set(DBConfiguration.URL_PROPERTY, "jdbc:hsqldb:.");
     conf.set(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY, "");
     conf.set(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, "");
@@ -27,18 +30,49 @@ public class SqlServerUpsertOutputFormatTest {
     String[] columnNames = { "FirstColumn", "SecondColumn", "ThirdColumn" };
     String[] updateKeyColumns = { "FirstColumn" };
     conf.set(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tableName);
-    conf.set(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, 
StringUtils.join(columnNames, ','));
-    conf.set(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY, 
StringUtils.join(updateKeyColumns, ','));
+    conf.set(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY,
+      StringUtils.join(columnNames, ','));
+    conf.set(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY,
+      StringUtils.join(updateKeyColumns, ','));
     conf.set(SQLServerManager.TABLE_HINTS_PROP, "NOLOCK");
     conf.set(SQLServerManager.IDENTITY_INSERT_PROP, "true");
-    TaskAttemptContext context = new TaskAttemptContextImpl(conf, new 
TaskAttemptID());
-    SqlServerUpsertOutputFormat outputFormat = new 
SqlServerUpsertOutputFormat();
-    SqlServerUpsertRecordWriter recordWriter = outputFormat.new 
SqlServerUpsertRecordWriter(context);
-    assertEquals("SET IDENTITY_INSERT #myTable ON " +
-      "MERGE INTO #myTable AS _target USING ( VALUES ( ?, ?, ? ) ) AS _source 
( FirstColumn, SecondColumn, ThirdColumn ) ON _source.FirstColumn = 
_target.FirstColumn" +
-      "  WHEN MATCHED THEN UPDATE SET _target.SecondColumn = 
_source.SecondColumn, _target.ThirdColumn = _source.ThirdColumn" +
-      "  WHEN NOT MATCHED THEN INSERT ( FirstColumn, SecondColumn, ThirdColumn 
) VALUES " +
-      "( _source.FirstColumn, _source.SecondColumn, _source.ThirdColumn ) " +
-      "OPTION (NOLOCK);", recordWriter.getUpdateStatement());
+    TaskAttemptContext context = null;
+    Class cls = null;
+    try {
+      cls =
+        Class
+        .forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
+    }
+    catch(ClassNotFoundException cnfe) {
+      // Not hadoop 2.0
+    }
+    if (cls == null) {
+      try {
+        cls =
+          Class
+          .forName("org.apache.hadoop.mapreduce.task.TaskAttemptContext");
+      }
+      catch(ClassNotFoundException cnfe) {
+        // Something wrong
+      }
+    }
+    assertNotNull(cls);
+    Constructor c = cls.getConstructor(Configuration.class,
+        TaskAttemptID.class);
+     context = (TaskAttemptContext)c.newInstance(conf, new TaskAttemptID());
+    SqlServerUpsertOutputFormat outputFormat =
+        new SqlServerUpsertOutputFormat();
+    SqlServerUpsertRecordWriter recordWriter =
+        outputFormat.new SqlServerUpsertRecordWriter(context);
+    assertEquals("SET IDENTITY_INSERT #myTable ON "
+      + "MERGE INTO #myTable AS _target USING ( VALUES ( ?, ?, ? ) )"
+      + " AS _source ( FirstColumn, SecondColumn, ThirdColumn ) ON "
+      + "_source.FirstColumn = _target.FirstColumn"
+      + "  WHEN MATCHED THEN UPDATE SET _target.SecondColumn = "
+      + "_source.SecondColumn, _target.ThirdColumn = _source.ThirdColumn"
+      + "  WHEN NOT MATCHED THEN INSERT ( FirstColumn, SecondColumn, "
+      + " ThirdColumn ) VALUES "
+      + "( _source.FirstColumn, _source.SecondColumn, _source.ThirdColumn ) "
+      + "OPTION (NOLOCK);", recordWriter.getUpdateStatement());
   }
 }

Reply via email to