Author: hashutosh
Date: Sat Sep 14 02:30:57 2013
New Revision: 1523178
URL: http://svn.apache.org/r1523178
Log:
HIVE-5290 : Some HCatalog tests have been behaving flaky (Brock Noland via
Ashutosh Chauhan)
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatContext.java
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatContext.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatContext.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatContext.java
(original)
+++
hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HCatContext.java
Sat Sep 14 02:30:57 2013
@@ -67,9 +67,11 @@ public enum HCatContext {
}
if (conf != newConf) {
- for (Map.Entry<String, String> entry : conf) {
- if ((entry.getKey().matches("hcat.*")) && (newConf.get(entry.getKey()) == null)) {
- newConf.set(entry.getKey(), entry.getValue());
+ synchronized (conf) {
+ for (Map.Entry<String, String> entry : conf) {
+ if ((entry.getKey().matches("hcat.*")) && (newConf.get(entry.getKey()) == null)) {
+ newConf.set(entry.getKey(), entry.getValue());
+ }
}
}
conf = newConf;
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java
(original)
+++
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatContext.java
Sat Sep 14 02:30:57 2013
@@ -65,10 +65,12 @@ public enum HCatContext {
}
if (conf != newConf) {
- for (Map.Entry<String, String> entry : conf) {
- if ((entry.getKey().matches("hcat.*")) && (newConf.get(entry.getKey()) == null)) {
- newConf.set(entry.getKey(), entry.getValue());
- }
+ synchronized (conf) {
+ for (Map.Entry<String, String> entry : conf) {
+ if ((entry.getKey().matches("hcat.*")) && (newConf.get(entry.getKey()) == null)) {
+ newConf.set(entry.getKey(), entry.getValue());
+ }
+ }
}
conf = newConf;
}
Modified:
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java
(original)
+++
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatMapRedUtil.java
Sat Sep 14 02:30:57 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.mapred.JobConte
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.mapred.TaskAttemptID;
+import org.apache.hadoop.mapreduce.JobID;
public class HCatMapRedUtil {
@@ -43,7 +44,9 @@ public class HCatMapRedUtil {
public static TaskAttemptContext createTaskAttemptContext(JobConf conf,
TaskAttemptID id, Progressable progressable) {
return
ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, id,
(Reporter) progressable);
}
-
+ public static org.apache.hadoop.mapreduce.TaskAttemptID createTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id) {
+ return ShimLoader.getHadoopShims().newTaskAttemptID(jobId, isMap, taskId, id);
+ }
public static org.apache.hadoop.mapred.JobContext
createJobContext(org.apache.hadoop.mapreduce.JobContext context) {
return createJobContext((JobConf)context.getConfiguration(),
context.getJobID(),
Modified:
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
(original)
+++
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
Sat Sep 14 02:30:57 2013
@@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory;
public class TestPermsGrp extends TestCase {
private boolean isServerRunning = false;
- private static final int msPort = 20101;
+ private int msPort;
private HiveConf hcatConf;
private Warehouse clientWH;
private HiveMetaStoreClient msc;
@@ -75,6 +75,7 @@ public class TestPermsGrp extends TestCa
return;
}
+ msPort = MetaStoreUtils.findFreePort();
MetaStoreUtils.startMetaStore(msPort,
ShimLoader.getHadoopThriftAuthBridge());
isServerRunning = true;
@@ -87,6 +88,7 @@ public class TestPermsGrp extends TestCa
hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+ hcatConf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
HCatSemanticAnalyzer.class.getName());
hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
Modified:
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
(original)
+++
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
Sat Sep 14 02:30:57 2013
@@ -27,8 +27,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -51,32 +50,40 @@ import org.apache.hcatalog.data.schema.H
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.data.Tuple;
+import org.junit.After;
+import org.junit.Before;
import org.junit.Test;
/**
* @deprecated Use/modify {@link
org.apache.hive.hcatalog.mapreduce.TestSequenceFileReadWrite} instead
*/
-public class TestSequenceFileReadWrite extends TestCase {
- private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
- "/build/test/data/" + TestSequenceFileReadWrite.class.getCanonicalName();
- private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR +
"/warehouse";
- private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
-
- private static Driver driver;
- private static PigServer server;
- private static String[] input;
- private static HiveConf hiveConf;
+public class TestSequenceFileReadWrite {
- public void Initialize() throws Exception {
+ private File dataDir;
+ private String warehouseDir;
+ private String inputFileName;
+ private Driver driver;
+ private PigServer server;
+ private String[] input;
+ private HiveConf hiveConf;
+
+ @Before
+ public void setup() throws Exception {
+ dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator +
+ TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
hiveConf = new HiveConf(this.getClass());
+ warehouseDir = new File(dataDir, "warehouse").getAbsolutePath();
+ inputFileName = new File(dataDir, "input.data").getAbsolutePath();
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
- hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname,
TEST_WAREHOUSE_DIR);
+ hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir);
driver = new Driver(hiveConf);
SessionState.start(new CliSessionState(hiveConf));
- new File(TEST_WAREHOUSE_DIR).mkdirs();
+ if(!(new File(warehouseDir).mkdirs())) {
+ throw new RuntimeException("Could not create " + warehouseDir);
+ }
int numRows = 3;
input = new String[numRows];
@@ -85,13 +92,19 @@ public class TestSequenceFileReadWrite e
String col2 = "b" + i;
input[i] = i + "," + col1 + "," + col2;
}
- HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+ HcatTestUtils.createTestDataFile(inputFileName, input);
server = new PigServer(ExecType.LOCAL);
}
+ @After
+ public void teardown() throws IOException {
+ if(dataDir != null) {
+ FileUtils.deleteDirectory(dataDir);
+ }
+ }
+
@Test
public void testSequenceTableWriteRead() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table(a0 int, a1 String, a2
String) STORED AS SEQUENCEFILE";
driver.run("drop table demo_table");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -99,7 +112,7 @@ public class TestSequenceFileReadWrite e
server.setBatchOn();
server.registerQuery("A = load '"
- + INPUT_FILE_NAME
+ + inputFileName
+ "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
server.registerQuery("store A into 'demo_table' using
org.apache.hcatalog.pig.HCatStorer();");
server.executeBatch();
@@ -120,7 +133,6 @@ public class TestSequenceFileReadWrite e
@Test
public void testTextTableWriteRead() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table_1(a0 int, a1 String, a2
String) STORED AS TEXTFILE";
driver.run("drop table demo_table_1");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -128,7 +140,7 @@ public class TestSequenceFileReadWrite e
server.setBatchOn();
server.registerQuery("A = load '"
- + INPUT_FILE_NAME
+ + inputFileName
+ "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
server.registerQuery("store A into 'demo_table_1' using
org.apache.hcatalog.pig.HCatStorer();");
server.executeBatch();
@@ -149,7 +161,6 @@ public class TestSequenceFileReadWrite e
@Test
public void testSequenceTableWriteReadMR() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table_2(a0 int, a1 String, a2
String) STORED AS SEQUENCEFILE";
driver.run("drop table demo_table_2");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -165,7 +176,7 @@ public class TestSequenceFileReadWrite e
job.setOutputKeyClass(NullWritable.class);
job.setOutputValueClass(DefaultHCatRecord.class);
job.setInputFormatClass(TextInputFormat.class);
- TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+ TextInputFormat.setInputPaths(job, inputFileName);
HCatOutputFormat.setOutput(job, OutputJobInfo.create(
MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
@@ -196,7 +207,6 @@ public class TestSequenceFileReadWrite e
@Test
public void testTextTableWriteReadMR() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table_3(a0 int, a1 String, a2
String) STORED AS TEXTFILE";
driver.run("drop table demo_table_3");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -213,7 +223,7 @@ public class TestSequenceFileReadWrite e
job.setOutputValueClass(DefaultHCatRecord.class);
job.setInputFormatClass(TextInputFormat.class);
job.setNumReduceTasks(0);
- TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+ TextInputFormat.setInputPaths(job, inputFileName);
HCatOutputFormat.setOutput(job, OutputJobInfo.create(
MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));
Modified:
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
(original)
+++
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
Sat Sep 14 02:30:57 2013
@@ -55,7 +55,7 @@ import org.slf4j.LoggerFactory;
public class TestPermsGrp extends TestCase {
private boolean isServerRunning = false;
- private static final int msPort = 20101;
+ private int msPort;
private HiveConf hcatConf;
private Warehouse clientWH;
private HiveMetaStoreClient msc;
@@ -72,7 +72,9 @@ public class TestPermsGrp extends TestCa
if (isServerRunning) {
return;
}
-
+
+
+ msPort = MetaStoreUtils.findFreePort();
MetaStoreUtils.startMetaStore(msPort,
ShimLoader.getHadoopThriftAuthBridge());
isServerRunning = true;
@@ -85,6 +87,7 @@ public class TestPermsGrp extends TestCa
hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+ hcatConf.setIntVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 120);
hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
HCatSemanticAnalyzer.class.getName());
hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -97,7 +100,6 @@ public class TestPermsGrp extends TestCa
System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
}
-
public void testCustomPerms() throws Exception {
String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
Modified:
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
(original)
+++
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
Sat Sep 14 02:30:57 2013
@@ -27,8 +27,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -51,29 +50,38 @@ import org.apache.hive.hcatalog.data.sch
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.data.Tuple;
+import org.junit.After;
+import org.junit.Before;
import org.junit.Test;
-public class TestSequenceFileReadWrite extends TestCase {
- private static final String TEST_DATA_DIR =
- "/tmp/build/test/data/" +
TestSequenceFileReadWrite.class.getCanonicalName();
- private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR +
"/warehouse";
- private static final String INPUT_FILE_NAME = TEST_DATA_DIR + "/input.data";
-
- private static Driver driver;
- private static PigServer server;
- private static String[] input;
- private static HiveConf hiveConf;
+public class TestSequenceFileReadWrite {
- public void Initialize() throws Exception {
+ private File dataDir;
+ private String warehouseDir;
+ private String inputFileName;
+ private Driver driver;
+ private PigServer server;
+ private String[] input;
+ private HiveConf hiveConf;
+
+ @Before
+ public void setup() throws Exception {
+ dataDir = new File(System.getProperty("java.io.tmpdir") + File.separator +
+ TestSequenceFileReadWrite.class.getCanonicalName() + "-" + System.currentTimeMillis());
+ hiveConf = new HiveConf(this.getClass());
+ warehouseDir = new File(dataDir, "warehouse").getAbsolutePath();
+ inputFileName = new File(dataDir, "input.data").getAbsolutePath();
hiveConf = new HiveConf(this.getClass());
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
- hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname,
TEST_WAREHOUSE_DIR);
+ hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir);
driver = new Driver(hiveConf);
SessionState.start(new CliSessionState(hiveConf));
- new File(TEST_WAREHOUSE_DIR).mkdirs();
+ if(!(new File(warehouseDir).mkdirs())) {
+ throw new RuntimeException("Could not create " + warehouseDir);
+ }
int numRows = 3;
input = new String[numRows];
@@ -82,13 +90,18 @@ public class TestSequenceFileReadWrite e
String col2 = "b" + i;
input[i] = i + "," + col1 + "," + col2;
}
- HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, input);
+ HcatTestUtils.createTestDataFile(inputFileName, input);
server = new PigServer(ExecType.LOCAL);
}
+ @After
+ public void teardown() throws IOException {
+ if(dataDir != null) {
+ FileUtils.deleteDirectory(dataDir);
+ }
+ }
@Test
public void testSequenceTableWriteRead() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table(a0 int, a1 String, a2
String) STORED AS SEQUENCEFILE";
driver.run("drop table demo_table");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -96,7 +109,7 @@ public class TestSequenceFileReadWrite e
server.setBatchOn();
server.registerQuery("A = load '"
- + INPUT_FILE_NAME
+ + inputFileName
+ "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
server.registerQuery("store A into 'demo_table' using
org.apache.hive.hcatalog.pig.HCatStorer();");
server.executeBatch();
@@ -117,7 +130,6 @@ public class TestSequenceFileReadWrite e
@Test
public void testTextTableWriteRead() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table_1(a0 int, a1 String, a2
String) STORED AS TEXTFILE";
driver.run("drop table demo_table_1");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -125,7 +137,7 @@ public class TestSequenceFileReadWrite e
server.setBatchOn();
server.registerQuery("A = load '"
- + INPUT_FILE_NAME
+ + inputFileName
+ "' using PigStorage(',') as (a0:int,a1:chararray,a2:chararray);");
server.registerQuery("store A into 'demo_table_1' using
org.apache.hive.hcatalog.pig.HCatStorer();");
server.executeBatch();
@@ -146,7 +158,6 @@ public class TestSequenceFileReadWrite e
@Test
public void testSequenceTableWriteReadMR() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table_2(a0 int, a1 String, a2
String) STORED AS SEQUENCEFILE";
driver.run("drop table demo_table_2");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -162,7 +173,7 @@ public class TestSequenceFileReadWrite e
job.setOutputKeyClass(NullWritable.class);
job.setOutputValueClass(DefaultHCatRecord.class);
job.setInputFormatClass(TextInputFormat.class);
- TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+ TextInputFormat.setInputPaths(job, inputFileName);
HCatOutputFormat.setOutput(job, OutputJobInfo.create(
MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
@@ -193,7 +204,6 @@ public class TestSequenceFileReadWrite e
@Test
public void testTextTableWriteReadMR() throws Exception {
- Initialize();
String createTable = "CREATE TABLE demo_table_3(a0 int, a1 String, a2
String) STORED AS TEXTFILE";
driver.run("drop table demo_table_3");
int retCode1 = driver.run(createTable).getResponseCode();
@@ -210,7 +220,7 @@ public class TestSequenceFileReadWrite e
job.setOutputValueClass(DefaultHCatRecord.class);
job.setInputFormatClass(TextInputFormat.class);
job.setNumReduceTasks(0);
- TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
+ TextInputFormat.setInputPaths(job, inputFileName);
HCatOutputFormat.setOutput(job, OutputJobInfo.create(
MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));
Modified:
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java
(original)
+++
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java
Sat Sep 14 02:30:57 2013
@@ -18,6 +18,11 @@
*/
package org.apache.hcatalog.pig;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
@@ -29,8 +34,7 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -48,13 +52,16 @@ import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
/**
* @deprecated Use/modify {@link org.apache.hive.hcatalog.pig.TestHCatLoader}
instead
*/
-public class TestHCatLoader extends TestCase {
- private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
- "/build/test/data/" + TestHCatLoader.class.getCanonicalName();
+public class TestHCatLoader {
+ private static final String TEST_DATA_DIR =
System.getProperty("java.io.tmpdir") + File.separator
+ + TestHCatLoader.class.getCanonicalName() + "-" +
System.currentTimeMillis();
private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR +
"/warehouse";
private static final String BASIC_FILE_NAME = TEST_DATA_DIR +
"/basic.input.data";
private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR +
"/complex.input.data";
@@ -63,13 +70,9 @@ public class TestHCatLoader extends Test
private static final String COMPLEX_TABLE = "junit_unparted_complex";
private static final String PARTITIONED_TABLE = "junit_parted_basic";
private static final String SPECIFIC_SIZE_TABLE = "junit_specific_size";
- private static Driver driver;
-
- private static int guardTestCount = 6; // ugh, instantiate using
introspection in guardedSetupBeforeClass
- private static boolean setupHasRun = false;
-
- private static Map<Integer, Pair<Integer, String>> basicInputData;
+ private Driver driver;
+ private Map<Integer, Pair<Integer, String>> basicInputData;
protected String storageFormat() {
return "RCFILE tblproperties('hcat.isd'='org.apache.hcatalog.rcfile.RCFileInputDriver'," +
@@ -97,18 +100,16 @@ public class TestHCatLoader extends Test
createTable(tablename, schema, null);
}
- protected void guardedSetUpBeforeClass() throws Exception {
- if (!setupHasRun) {
- setupHasRun = true;
- } else {
- return;
- }
+ @Before
+ public void setup() throws Exception {
File f = new File(TEST_WAREHOUSE_DIR);
if (f.exists()) {
FileUtil.fullyDelete(f);
}
- new File(TEST_WAREHOUSE_DIR).mkdirs();
+ if(!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
+ throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
+ }
HiveConf hiveConf = new HiveConf(this.getClass());
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -118,8 +119,6 @@ public class TestHCatLoader extends Test
driver = new Driver(hiveConf);
SessionState.start(new CliSessionState(hiveConf));
- cleanup();
-
createTable(BASIC_TABLE, "a int, b string");
createTable(COMPLEX_TABLE,
"name string, studentid int, "
@@ -172,29 +171,16 @@ public class TestHCatLoader extends Test
}
- private void cleanup() throws IOException, CommandNeedRetryException {
- dropTable(BASIC_TABLE);
- dropTable(COMPLEX_TABLE);
- dropTable(PARTITIONED_TABLE);
- dropTable(SPECIFIC_SIZE_TABLE);
- }
-
- protected void guardedTearDownAfterClass() throws Exception {
- guardTestCount--;
- if (guardTestCount > 0) {
- return;
+ @After
+ public void tearDown() throws Exception {
+ try {
+ dropTable(BASIC_TABLE);
+ dropTable(COMPLEX_TABLE);
+ dropTable(PARTITIONED_TABLE);
+ dropTable(SPECIFIC_SIZE_TABLE);
+ } finally {
+ FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
}
- cleanup();
- }
-
- @Override
- protected void setUp() throws Exception {
- guardedSetUpBeforeClass();
- }
-
- @Override
- protected void tearDown() throws Exception {
- guardedTearDownAfterClass();
}
public void testSchemaLoadBasic() throws IOException {
@@ -213,6 +199,7 @@ public class TestHCatLoader extends Test
}
+ @Test
public void testReadDataBasic() throws IOException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -230,7 +217,7 @@ public class TestHCatLoader extends Test
}
assertEquals(basicInputData.size(), numTuplesRead);
}
-
+ @Test
public void testSchemaLoadComplex() throws IOException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -287,7 +274,7 @@ public class TestHCatLoader extends Test
}
}
-
+ @Test
public void testReadPartitionedBasic() throws IOException,
CommandNeedRetryException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -350,7 +337,7 @@ public class TestHCatLoader extends Test
}
assertEquals(6, count2);
}
-
+ @Test
public void testProjectionsBasic() throws IOException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -395,21 +382,21 @@ public class TestHCatLoader extends Test
}
assertEquals(basicInputData.size(), numTuplesRead);
}
-
+ @Test
public void testGetInputBytes() throws Exception {
File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE +
"/part-m-00000");
file.deleteOnExit();
RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
randomAccessFile.setLength(2L * 1024 * 1024 * 1024);
-
+ randomAccessFile.close();
Job job = new Job();
HCatLoader hCatLoader = new HCatLoader();
- hCatLoader.setUDFContextSignature(this.getName());
+ hCatLoader.setUDFContextSignature("testGetInputBytes");
hCatLoader.setLocation(SPECIFIC_SIZE_TABLE, job);
ResourceStatistics statistics =
hCatLoader.getStatistics(file.getAbsolutePath(), job);
assertEquals(2048, (long) statistics.getmBytes());
}
-
+ @Test
public void testConvertBooleanToInt() throws Exception {
String tbl = "test_convert_boolean_to_int";
String inputFileName = TEST_DATA_DIR + "/testConvertBooleanToInt/data.txt";
Modified:
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
(original)
+++
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
Sat Sep 14 02:30:57 2013
@@ -210,7 +210,7 @@ public class TestHCatLoaderComplexSchema
while (it.hasNext()) {
Tuple input = data.get(i++);
Tuple output = it.next();
- Assert.assertEquals(input.toString(), output.toString());
+ compareTuples(input, output);
LOG.info("tuple : {} ", output);
}
Schema dumpedXSchema = server.dumpSchema("X");
@@ -224,6 +224,23 @@ public class TestHCatLoaderComplexSchema
dropTable(tablename);
}
}
+ private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
+ Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
+ for (int i = 0; i < t1.size(); i++) {
+ Object f1 = t1.get(i);
+ Object f2 = t2.get(i);
+ Assert.assertNotNull("left", f1);
+ Assert.assertNotNull("right", f2);
+ String msg = "left: " + f1 + ", right: " + f2;
+ Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
+ }
+ }
+
+ private String noOrder(String s) {
+ char[] chars = s.toCharArray();
+ Arrays.sort(chars);
+ return new String(chars);
+ }
private String compareIgnoreFiledNames(Schema expected, Schema got) throws
FrontendException {
if (expected == null || got == null) {
Modified:
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
(original)
+++
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestE2EScenarios.java
Sat Sep 14 02:30:57 2013
@@ -25,6 +25,7 @@ import java.util.Iterator;
import junit.framework.TestCase;
+import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hive.cli.CliSessionState;
@@ -35,6 +36,7 @@ import org.apache.hadoop.hive.ql.session
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
@@ -55,8 +57,8 @@ import org.apache.pig.data.Tuple;
public class TestE2EScenarios extends TestCase {
- private static final String TEST_DATA_DIR = System.getProperty("user.dir") +
- "/build/test/data/" + TestHCatLoader.class.getCanonicalName();
+ private static final String TEST_DATA_DIR =
System.getProperty("java.io.tmpdir") + File.separator
+ + TestHCatLoader.class.getCanonicalName() + "-" +
System.currentTimeMillis();
private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR +
"/warehouse";
private static final String TEXTFILE_LOCN = TEST_DATA_DIR + "/textfile";
@@ -74,7 +76,9 @@ public class TestE2EScenarios extends Te
if (f.exists()) {
FileUtil.fullyDelete(f);
}
- new File(TEST_WAREHOUSE_DIR).mkdirs();
+ if(!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
+ throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
+ }
HiveConf hiveConf = new HiveConf(this.getClass());
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -88,9 +92,13 @@ public class TestE2EScenarios extends Te
@Override
protected void tearDown() throws Exception {
- dropTable("inpy");
- dropTable("rc5318");
- dropTable("orc5318");
+ try {
+ dropTable("inpy");
+ dropTable("rc5318");
+ dropTable("orc5318");
+ } finally {
+ FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
+ }
}
private void dropTable(String tablename) throws IOException,
CommandNeedRetryException {
@@ -191,9 +199,9 @@ public class TestE2EScenarios extends Te
private TaskAttemptContext createTaskAttemptContext(Configuration tconf) {
Configuration conf = (tconf == null) ? (new Configuration()) : tconf;
- TaskAttemptID taskId = new TaskAttemptID();
+ TaskAttemptID taskId = HCatMapRedUtil.createTaskAttemptID(new
JobID("200908190029", 1), false, 1, 1);
conf.setInt("mapred.task.partition", taskId.getId());
- conf.set("mapred.task.id", "attempt__0000_r_000000_" + taskId.getId());
+ conf.set("mapred.task.id", taskId.toString());
TaskAttemptContext rtaskContext =
HCatMapRedUtil.createTaskAttemptContext(conf , taskId);
return rtaskContext;
}
Modified:
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
(original)
+++
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
Sat Sep 14 02:30:57 2013
@@ -18,6 +18,11 @@
*/
package org.apache.hive.hcatalog.pig;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
@@ -29,8 +34,7 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
-import junit.framework.TestCase;
-
+import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -48,10 +52,13 @@ import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
-
-public class TestHCatLoader extends TestCase {
- private static final String TEST_DATA_DIR =
- "/tmp/build/test/data/" + TestHCatLoader.class.getCanonicalName();
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestHCatLoader {
+ private static final String TEST_DATA_DIR =
System.getProperty("java.io.tmpdir") + File.separator
+ + TestHCatLoader.class.getCanonicalName() + "-" +
System.currentTimeMillis();
private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR +
"/warehouse";
private static final String BASIC_FILE_NAME = TEST_DATA_DIR +
"/basic.input.data";
private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR +
"/complex.input.data";
@@ -60,13 +67,9 @@ public class TestHCatLoader extends Test
private static final String COMPLEX_TABLE = "junit_unparted_complex";
private static final String PARTITIONED_TABLE = "junit_parted_basic";
private static final String SPECIFIC_SIZE_TABLE = "junit_specific_size";
- private static Driver driver;
-
- private static int guardTestCount = 6; // ugh, instantiate using
introspection in guardedSetupBeforeClass
- private static boolean setupHasRun = false;
-
- private static Map<Integer, Pair<Integer, String>> basicInputData;
+ private Driver driver;
+ private Map<Integer, Pair<Integer, String>> basicInputData;
protected String storageFormat() {
return "RCFILE
tblproperties('hcat.isd'='org.apache.hive.hcatalog.rcfile.RCFileInputDriver'," +
@@ -94,18 +97,16 @@ public class TestHCatLoader extends Test
createTable(tablename, schema, null);
}
- protected void guardedSetUpBeforeClass() throws Exception {
- if (!setupHasRun) {
- setupHasRun = true;
- } else {
- return;
- }
+ @Before
+ public void setup() throws Exception {
File f = new File(TEST_WAREHOUSE_DIR);
if (f.exists()) {
FileUtil.fullyDelete(f);
}
- new File(TEST_WAREHOUSE_DIR).mkdirs();
+ if(!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
+ throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
+ }
HiveConf hiveConf = new HiveConf(this.getClass());
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
@@ -115,8 +116,6 @@ public class TestHCatLoader extends Test
driver = new Driver(hiveConf);
SessionState.start(new CliSessionState(hiveConf));
- cleanup();
-
createTable(BASIC_TABLE, "a int, b string");
createTable(COMPLEX_TABLE,
"name string, studentid int, "
@@ -169,31 +168,19 @@ public class TestHCatLoader extends Test
}
- private void cleanup() throws IOException, CommandNeedRetryException {
- dropTable(BASIC_TABLE);
- dropTable(COMPLEX_TABLE);
- dropTable(PARTITIONED_TABLE);
- dropTable(SPECIFIC_SIZE_TABLE);
- }
-
- protected void guardedTearDownAfterClass() throws Exception {
- guardTestCount--;
- if (guardTestCount > 0) {
- return;
+ @After
+ public void tearDown() throws Exception {
+ try {
+ dropTable(BASIC_TABLE);
+ dropTable(COMPLEX_TABLE);
+ dropTable(PARTITIONED_TABLE);
+ dropTable(SPECIFIC_SIZE_TABLE);
+ } finally {
+ FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
}
- cleanup();
- }
-
- @Override
- protected void setUp() throws Exception {
- guardedSetUpBeforeClass();
- }
-
- @Override
- protected void tearDown() throws Exception {
- guardedTearDownAfterClass();
}
+ @Test
public void testSchemaLoadBasic() throws IOException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -210,6 +197,7 @@ public class TestHCatLoader extends Test
}
+ @Test
public void testReadDataBasic() throws IOException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -228,6 +216,7 @@ public class TestHCatLoader extends Test
assertEquals(basicInputData.size(), numTuplesRead);
}
+ @Test
public void testSchemaLoadComplex() throws IOException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -285,6 +274,7 @@ public class TestHCatLoader extends Test
}
+ @Test
public void testReadPartitionedBasic() throws IOException,
CommandNeedRetryException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -348,6 +338,7 @@ public class TestHCatLoader extends Test
assertEquals(6, count2);
}
+ @Test
public void testProjectionsBasic() throws IOException {
PigServer server = new PigServer(ExecType.LOCAL);
@@ -393,20 +384,22 @@ public class TestHCatLoader extends Test
assertEquals(basicInputData.size(), numTuplesRead);
}
+ @Test
public void testGetInputBytes() throws Exception {
File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE +
"/part-m-00000");
file.deleteOnExit();
RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
randomAccessFile.setLength(2L * 1024 * 1024 * 1024);
-
+ randomAccessFile.close();
Job job = new Job();
HCatLoader hCatLoader = new HCatLoader();
- hCatLoader.setUDFContextSignature(this.getName());
+ hCatLoader.setUDFContextSignature("testGetInputBytes");
hCatLoader.setLocation(SPECIFIC_SIZE_TABLE, job);
ResourceStatistics statistics =
hCatLoader.getStatistics(file.getAbsolutePath(), job);
assertEquals(2048, (long) statistics.getmBytes());
}
+ @Test
public void testConvertBooleanToInt() throws Exception {
String tbl = "test_convert_boolean_to_int";
String inputFileName = TEST_DATA_DIR + "/testConvertBooleanToInt/data.txt";
Modified:
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL:
http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
(original)
+++
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
Sat Sep 14 02:30:57 2013
@@ -211,7 +211,7 @@ public class TestHCatLoaderComplexSchema
while (it.hasNext()) {
Tuple input = data.get(i++);
Tuple output = it.next();
- Assert.assertEquals(input.toString(), output.toString());
+ compareTuples(input, output);
LOG.info("tuple : {} ", output);
}
Schema dumpedXSchema = server.dumpSchema("X");
@@ -225,6 +225,24 @@ public class TestHCatLoaderComplexSchema
dropTable(tablename);
}
}
+
+ private void compareTuples(Tuple t1, Tuple t2) throws ExecException {
+ Assert.assertEquals("Tuple Sizes don't match", t1.size(), t2.size());
+ for (int i = 0; i < t1.size(); i++) {
+ Object f1 = t1.get(i);
+ Object f2 = t2.get(i);
+ Assert.assertNotNull("left", f1);
+ Assert.assertNotNull("right", f2);
+ String msg = "left: " + f1 + ", right: " + f2;
+ Assert.assertEquals(msg, noOrder(f1.toString()), noOrder(f2.toString()));
+ }
+ }
+
+ private String noOrder(String s) {
+ char[] chars = s.toCharArray();
+ Arrays.sort(chars);
+ return new String(chars);
+ }
private String compareIgnoreFiledNames(Schema expected, Schema got) throws
FrontendException {
if (expected == null || got == null) {
Modified:
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL:
http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
(original)
+++
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
Sat Sep 14 02:30:57 2013
@@ -67,6 +67,7 @@ import org.apache.hadoop.mapred.lib.Comb
import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UnixUserGroupInformation;
@@ -711,6 +712,11 @@ public class Hadoop20Shims implements Ha
}
@Override
+ public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int
taskId, int id) {
+ return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), isMap,
taskId, id);
+ }
+
+ @Override
public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
return new org.apache.hadoop.mapreduce.JobContext(job.getConfiguration(),
job.getJobID());
}
Modified:
hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL:
http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
(original)
+++
hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
Sat Sep 14 02:30:57 2013
@@ -92,6 +92,11 @@ public class Hadoop20SShims extends Hado
}
@Override
+ public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int
taskId, int id) {
+ return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), isMap,
taskId, id);
+ }
+
+ @Override
public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
return new org.apache.hadoop.mapreduce.JobContext(job.getConfiguration(),
job.getJobID());
}
Modified:
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL:
http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
(original)
+++
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
Sat Sep 14 02:30:57 2013
@@ -106,6 +106,11 @@ public class Hadoop23Shims extends Hadoo
}
@Override
+ public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int
taskId, int id) {
+ return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), isMap ?
TaskType.MAP : TaskType.REDUCE, taskId, id);
+ }
+
+ @Override
public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
return new JobContextImpl(job.getConfiguration(), job.getJobID());
}
Modified:
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL:
http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1523178&r1=1523177&r2=1523178&view=diff
==============================================================================
---
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
(original)
+++
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
Sat Sep 14 02:30:57 2013
@@ -318,6 +318,8 @@ public interface HadoopShims {
public TaskAttemptContext newTaskAttemptContext(Configuration conf, final
Progressable progressable);
+ public TaskAttemptID newTaskAttemptID(JobID jobId, boolean isMap, int
taskId, int id);
+
public JobContext newJobContext(Job job);
/**