http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/orm/TestClassWriter.java ---------------------------------------------------------------------- diff --git a/src/test/com/cloudera/sqoop/orm/TestClassWriter.java b/src/test/com/cloudera/sqoop/orm/TestClassWriter.java deleted file mode 100644 index b3a8a17..0000000 --- a/src/test/com/cloudera/sqoop/orm/TestClassWriter.java +++ /dev/null @@ -1,737 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.cloudera.sqoop.orm; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.sql.Connection; -import java.sql.Statement; -import java.sql.SQLException; -import java.util.Random; -import java.util.jar.JarEntry; -import java.util.jar.JarInputStream; - - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; - -import com.cloudera.sqoop.SqoopOptions; -import com.cloudera.sqoop.TestConnFactory.DummyManager; -import com.cloudera.sqoop.manager.ConnManager; -import com.cloudera.sqoop.testutil.DirUtil; -import com.cloudera.sqoop.testutil.HsqldbTestServer; -import com.cloudera.sqoop.testutil.ImportJobTestCase; -import com.cloudera.sqoop.tool.ImportTool; -import com.cloudera.sqoop.util.ClassLoaderStack; -import org.junit.rules.ExpectedException; - -import java.lang.reflect.Field; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -/** - * Test that the ClassWriter generates Java classes based on the given table, - * which compile. 
- */ -public class TestClassWriter { - - public static final Log LOG = - LogFactory.getLog(TestClassWriter.class.getName()); - private static final String WIDE_TABLE_NAME = "WIDETABLE"; - private static final int WIDE_TABLE_COLUMN_COUNT = 800; - private static final int WIDE_TABLE_ROW_COUNT = 20_000; - - // instance variables populated during setUp, used during tests - private HsqldbTestServer testServer; - private ConnManager manager; - private SqoopOptions options; - - @Rule - public ExpectedException thrown = ExpectedException.none(); - - @Before - public void setUp() { - testServer = new HsqldbTestServer(); - org.apache.log4j.Logger root = org.apache.log4j.Logger.getRootLogger(); - root.setLevel(org.apache.log4j.Level.DEBUG); - try { - testServer.resetServer(); - } catch (SQLException sqlE) { - LOG.error("Got SQLException: " + sqlE.toString()); - fail("Got SQLException: " + sqlE.toString()); - } catch (ClassNotFoundException cnfe) { - LOG.error("Could not find class for db driver: " + cnfe.toString()); - fail("Could not find class for db driver: " + cnfe.toString()); - } - - manager = testServer.getManager(); - options = testServer.getSqoopOptions(); - - // sanity check: make sure we're in a tmp dir before we blow anything away. - assertTrue("Test generates code in non-tmp dir!", - CODE_GEN_DIR.startsWith(ImportJobTestCase.TEMP_BASE_DIR)); - assertTrue("Test generates jars in non-tmp dir!", - JAR_GEN_DIR.startsWith(ImportJobTestCase.TEMP_BASE_DIR)); - - // start out by removing these directories ahead of time - // to ensure that this is truly generating the code. 
- File codeGenDirFile = new File(CODE_GEN_DIR); - File classGenDirFile = new File(JAR_GEN_DIR); - - if (codeGenDirFile.exists()) { - LOG.debug("Removing code gen dir: " + codeGenDirFile); - if (!DirUtil.deleteDir(codeGenDirFile)) { - LOG.warn("Could not delete " + codeGenDirFile + " prior to test"); - } - } - - if (classGenDirFile.exists()) { - LOG.debug("Removing class gen dir: " + classGenDirFile); - if (!DirUtil.deleteDir(classGenDirFile)) { - LOG.warn("Could not delete " + classGenDirFile + " prior to test"); - } - } - } - - @After - public void tearDown() { - try { - manager.close(); - } catch (SQLException sqlE) { - LOG.error("Got SQLException: " + sqlE.toString()); - fail("Got SQLException: " + sqlE.toString()); - } - } - - static final String CODE_GEN_DIR = ImportJobTestCase.TEMP_BASE_DIR - + "sqoop/test/codegen"; - static final String JAR_GEN_DIR = ImportJobTestCase.TEMP_BASE_DIR - + "sqoop/test/jargen"; - - private File runGenerationTest(String[] argv, String classNameToCheck) { - return runGenerationTest(argv, classNameToCheck, HsqldbTestServer.getTableName()); - } - - /** - * Run a test to verify that we can generate code and it emits the output - * files where we expect them. 
- * @return - */ - private File runGenerationTest(String[] argv, String classNameToCheck, String tableName) { - File codeGenDirFile = new File(CODE_GEN_DIR); - File classGenDirFile = new File(JAR_GEN_DIR); - - try { - options = new ImportTool().parseArguments(argv, - null, options, true); - } catch (Exception e) { - LOG.error("Could not parse options: " + e.toString()); - } - - CompilationManager compileMgr = new CompilationManager(options); - ClassWriter writer = new ClassWriter(options, manager, - tableName, compileMgr); - - try { - writer.generate(); - compileMgr.compile(); - compileMgr.jar(); - } catch (IOException ioe) { - LOG.error("Got IOException: " + ioe.toString()); - fail("Got IOException: " + ioe.toString()); - } - - String classFileNameToCheck = classNameToCheck.replace('.', - File.separatorChar); - LOG.debug("Class file to check for: " + classFileNameToCheck); - - // Check that all the files we expected to generate (.java, .class, .jar) - // exist. - File tableFile = new File(codeGenDirFile, classFileNameToCheck + ".java"); - assertTrue("Cannot find generated source file for table!", - tableFile.exists()); - LOG.debug("Found generated source: " + tableFile); - - File tableClassFile = new File(classGenDirFile, classFileNameToCheck - + ".class"); - assertTrue("Cannot find generated class file for table!", - tableClassFile.exists()); - LOG.debug("Found generated class: " + tableClassFile); - - File jarFile = new File(compileMgr.getJarFilename()); - assertTrue("Cannot find compiled jar", jarFile.exists()); - LOG.debug("Found generated jar: " + jarFile); - - // check that the .class file made it into the .jar by enumerating - // available entries in the jar file. 
- boolean foundCompiledClass = false; - if (Shell.WINDOWS) { - // In Windows OS, elements in jar files still need to have a path - // separator of '/' rather than the default File.separator which is '\' - classFileNameToCheck = classFileNameToCheck.replace(File.separator, "/"); - } - try { - JarInputStream jis = new JarInputStream(new FileInputStream(jarFile)); - - LOG.debug("Jar file has entries:"); - while (true) { - JarEntry entry = jis.getNextJarEntry(); - if (null == entry) { - // no more entries. - break; - } - - if (entry.getName().equals(classFileNameToCheck + ".class")) { - foundCompiledClass = true; - LOG.debug(" * " + entry.getName()); - } else { - LOG.debug(" " + entry.getName()); - } - } - - jis.close(); - } catch (IOException ioe) { - fail("Got IOException iterating over Jar file: " + ioe.toString()); - } - - assertTrue("Cannot find .class file " + classFileNameToCheck - + ".class in jar file", foundCompiledClass); - - LOG.debug("Found class in jar - test success!"); - return jarFile; - } - - /** - * Test that we can generate code. Test that we can redirect the --outdir - * and --bindir too. - */ - @Test - public void testCodeGen() { - - // Set the option strings in an "argv" to redirect our srcdir and bindir. - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - }; - - runGenerationTest(argv, HsqldbTestServer.getTableName()); - } - - private static final String OVERRIDE_CLASS_NAME = "override"; - - /** - * Test that we can generate code with a custom class name. 
- */ - @Test - public void testSetClassName() { - - // Set the option strings in an "argv" to redirect our srcdir and bindir - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - "--class-name", - OVERRIDE_CLASS_NAME, - }; - - runGenerationTest(argv, OVERRIDE_CLASS_NAME); - } - - private static final String OVERRIDE_CLASS_AND_PACKAGE_NAME = - "override.pkg.prefix.classname"; - - /** - * Test that we can generate code with a custom class name that includes a - * package. - */ - @Test - public void testSetClassAndPackageName() { - - // Set the option strings in an "argv" to redirect our srcdir and bindir - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - "--class-name", - OVERRIDE_CLASS_AND_PACKAGE_NAME, - }; - - runGenerationTest(argv, OVERRIDE_CLASS_AND_PACKAGE_NAME); - } - - private static final String OVERRIDE_PACKAGE_NAME = - "special.userpackage.name"; - - /** - * Test that we can generate code with a custom class name that includes a - * package. - */ - @Test - public void testSetPackageName() { - - // Set the option strings in an "argv" to redirect our srcdir and bindir - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - "--package-name", - OVERRIDE_PACKAGE_NAME, - }; - - runGenerationTest(argv, OVERRIDE_PACKAGE_NAME + "." - + HsqldbTestServer.getTableName()); - } - - - // Test the SQL identifier -> Java identifier conversion. 
- @Test - public void testJavaIdentifierConversion() { - assertNull(ClassWriter.getIdentifierStrForChar(' ')); - assertNull(ClassWriter.getIdentifierStrForChar('\t')); - assertNull(ClassWriter.getIdentifierStrForChar('\r')); - assertNull(ClassWriter.getIdentifierStrForChar('\n')); - assertEquals("x", ClassWriter.getIdentifierStrForChar('x')); - assertEquals("_", ClassWriter.getIdentifierStrForChar('-')); - assertEquals("_", ClassWriter.getIdentifierStrForChar('_')); - - assertEquals("foo", ClassWriter.toJavaIdentifier("foo")); - - assertEquals("_abstract", ClassWriter.toJavaIdentifier("abstract")); - assertEquals("_assert", ClassWriter.toJavaIdentifier("assert")); - assertEquals("_boolean", ClassWriter.toJavaIdentifier("boolean")); - assertEquals("_break", ClassWriter.toJavaIdentifier("break")); - assertEquals("_byte", ClassWriter.toJavaIdentifier("byte")); - assertEquals("_case", ClassWriter.toJavaIdentifier("case")); - assertEquals("_catch", ClassWriter.toJavaIdentifier("catch")); - assertEquals("_char", ClassWriter.toJavaIdentifier("char")); - assertEquals("_class", ClassWriter.toJavaIdentifier("class")); - assertEquals("_const", ClassWriter.toJavaIdentifier("const")); - assertEquals("_continue", ClassWriter.toJavaIdentifier("continue")); - assertEquals("_default", ClassWriter.toJavaIdentifier("default")); - assertEquals("_do", ClassWriter.toJavaIdentifier("do")); - assertEquals("_double", ClassWriter.toJavaIdentifier("double")); - assertEquals("_else", ClassWriter.toJavaIdentifier("else")); - assertEquals("_enum", ClassWriter.toJavaIdentifier("enum")); - assertEquals("_extends", ClassWriter.toJavaIdentifier("extends")); - assertEquals("_false", ClassWriter.toJavaIdentifier("false")); - assertEquals("_final", ClassWriter.toJavaIdentifier("final")); - assertEquals("_finally", ClassWriter.toJavaIdentifier("finally")); - assertEquals("_float", ClassWriter.toJavaIdentifier("float")); - assertEquals("_for", ClassWriter.toJavaIdentifier("for")); - 
assertEquals("_goto", ClassWriter.toJavaIdentifier("goto")); - assertEquals("_if", ClassWriter.toJavaIdentifier("if")); - assertEquals("_implements", ClassWriter.toJavaIdentifier("implements")); - assertEquals("_import", ClassWriter.toJavaIdentifier("import")); - assertEquals("_instanceof", ClassWriter.toJavaIdentifier("instanceof")); - assertEquals("_int", ClassWriter.toJavaIdentifier("int")); - assertEquals("_interface", ClassWriter.toJavaIdentifier("interface")); - assertEquals("_long", ClassWriter.toJavaIdentifier("long")); - assertEquals("_native", ClassWriter.toJavaIdentifier("native")); - assertEquals("_new", ClassWriter.toJavaIdentifier("new")); - assertEquals("_null", ClassWriter.toJavaIdentifier("null")); - assertEquals("_package", ClassWriter.toJavaIdentifier("package")); - assertEquals("_private", ClassWriter.toJavaIdentifier("private")); - assertEquals("_protected", ClassWriter.toJavaIdentifier("protected")); - assertEquals("_public", ClassWriter.toJavaIdentifier("public")); - assertEquals("_return", ClassWriter.toJavaIdentifier("return")); - assertEquals("_short", ClassWriter.toJavaIdentifier("short")); - assertEquals("_static", ClassWriter.toJavaIdentifier("static")); - assertEquals("_strictfp", ClassWriter.toJavaIdentifier("strictfp")); - assertEquals("_super", ClassWriter.toJavaIdentifier("super")); - assertEquals("_switch", ClassWriter.toJavaIdentifier("switch")); - assertEquals("_synchronized", ClassWriter.toJavaIdentifier("synchronized")); - assertEquals("_this", ClassWriter.toJavaIdentifier("this")); - assertEquals("_throw", ClassWriter.toJavaIdentifier("throw")); - assertEquals("_throws", ClassWriter.toJavaIdentifier("throws")); - assertEquals("_transient", ClassWriter.toJavaIdentifier("transient")); - assertEquals("_true", ClassWriter.toJavaIdentifier("true")); - assertEquals("_try", ClassWriter.toJavaIdentifier("try")); - assertEquals("_void", ClassWriter.toJavaIdentifier("void")); - assertEquals("_volatile", 
ClassWriter.toJavaIdentifier("volatile")); - assertEquals("_while", ClassWriter.toJavaIdentifier("while")); - - assertEquals("_class", ClassWriter.toJavaIdentifier("cla ss")); - assertEquals("_int", ClassWriter.toJavaIdentifier("int")); - assertEquals("thisismanywords", ClassWriter.toJavaIdentifier( - "this is many words")); - assertEquals("_9isLegalInSql", ClassWriter.toJavaIdentifier( - "9isLegalInSql")); - assertEquals("____", ClassWriter.toJavaIdentifier("___")); - assertEquals("__class", ClassWriter.toJavaIdentifier("_class")); - //Checking Java identifier for Constant PROTOCOL_VERSION - assertEquals("_PROTOCOL_VERSION", ClassWriter.toJavaIdentifier("PROTOCOL_VERSION")); - } - - @Test - public void testWeirdColumnNames() throws SQLException { - // Recreate the table with column names that aren't legal Java identifiers. - String tableName = HsqldbTestServer.getTableName(); - Connection connection = testServer.getConnection(); - Statement st = connection.createStatement(); - try { - st.executeUpdate("DROP TABLE " + tableName + " IF EXISTS"); - st.executeUpdate("CREATE TABLE " + tableName - + " (class INT, \"9field\" INT)"); - st.executeUpdate("INSERT INTO " + tableName + " VALUES(42, 41)"); - connection.commit(); - } finally { - st.close(); - connection.close(); - } - - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - "--package-name", - OVERRIDE_PACKAGE_NAME, - }; - - runGenerationTest(argv, OVERRIDE_PACKAGE_NAME + "." - + HsqldbTestServer.getTableName()); - } - - // Test For checking Codegneration perfroming successfully - // in case of Table with Column name as PROTOCOL_VERSION - @Test - public void testColumnNameAsProtocolVersion() throws SQLException { - // Recreate the table with column name as PROTOCOL_VERSION. 
- String tableName = HsqldbTestServer.getTableName(); - Connection connection = testServer.getConnection(); - Statement st = connection.createStatement(); - try { - st.executeUpdate("DROP TABLE " + tableName + " IF EXISTS"); - st.executeUpdate("CREATE TABLE " + tableName - + " (PROTOCOL_VERSION INT)"); - st.executeUpdate("INSERT INTO " + tableName + " VALUES(42)"); - connection.commit(); - } finally { - st.close(); - connection.close(); - } - - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - "--package-name", - OVERRIDE_PACKAGE_NAME, - }; - - runGenerationTest(argv, OVERRIDE_PACKAGE_NAME + "." - + HsqldbTestServer.getTableName()); - } - - @Test - public void testCloningTableWithVarbinaryDoesNotThrowNPE() throws SQLException, - IOException, ClassNotFoundException, NoSuchMethodException, - SecurityException, InstantiationException, IllegalAccessException, - IllegalArgumentException, InvocationTargetException { - String tableName = HsqldbTestServer.getTableName(); - Connection connection = testServer.getConnection(); - Statement st = connection.createStatement(); - try { - st.executeUpdate("DROP TABLE " + tableName + " IF EXISTS"); - st.executeUpdate("CREATE TABLE " + tableName - + " (id INT, test VARBINARY(10))"); - connection.commit(); - } finally { - st.close(); - connection.close(); - } - - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - "--package-name", - OVERRIDE_PACKAGE_NAME, - }; - - String className = OVERRIDE_PACKAGE_NAME + "." 
- + HsqldbTestServer.getTableName(); - File ormJarFile = runGenerationTest(argv, className); - - ClassLoader prevClassLoader = ClassLoaderStack.addJarFile( - ormJarFile.getCanonicalPath(), className); - Class tableClass = Class.forName(className, true, - Thread.currentThread().getContextClassLoader()); - Method cloneImplementation = tableClass.getMethod("clone"); - - Object instance = tableClass.newInstance(); - - assertTrue(cloneImplementation.invoke(instance).getClass(). - getCanonicalName().equals(className)); - - if (null != prevClassLoader) { - ClassLoaderStack.setCurrentClassLoader(prevClassLoader); - } - } - - /** - * Test the generated equals method. - * @throws IOException - * @throws ClassNotFoundException - * @throws IllegalAccessException - * @throws InstantiationException - * @throws NoSuchMethodException - * @throws SecurityException - * @throws InvocationTargetException - * @throws IllegalArgumentException - */ - @Test - public void testEqualsMethod() throws IOException, ClassNotFoundException, - InstantiationException, IllegalAccessException, NoSuchMethodException, - InvocationTargetException { - - // Set the option strings in an "argv" to redirect our srcdir and bindir - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - "--class-name", - OVERRIDE_CLASS_AND_PACKAGE_NAME, - }; - - File ormJarFile = runGenerationTest(argv, OVERRIDE_CLASS_AND_PACKAGE_NAME); - ClassLoader prevClassLoader = ClassLoaderStack.addJarFile( - ormJarFile.getCanonicalPath(), - OVERRIDE_CLASS_AND_PACKAGE_NAME); - Class tableClass = Class.forName( - OVERRIDE_CLASS_AND_PACKAGE_NAME, - true, - Thread.currentThread().getContextClassLoader()); - Method setterIntField1 = - tableClass.getMethod("set_INTFIELD1", Integer.class); - Method setterIntField2 = - tableClass.getMethod("set_INTFIELD2", Integer.class); - Method equalsImplementation = tableClass.getMethod("equals", Object.class); - - Object instance1 = tableClass.newInstance(); - Object instance2 = 
tableClass.newInstance(); - - // test reflexivity - assertTrue((Boolean) equalsImplementation.invoke(instance1, instance1)); - - // test equality for uninitialized fields - assertTrue((Boolean) equalsImplementation.invoke(instance1, instance2)); - - // test symmetry - assertTrue((Boolean) equalsImplementation.invoke(instance2, instance1)); - - // test reflexivity with initialized fields - setterIntField1.invoke(instance1, new Integer(1)); - setterIntField2.invoke(instance1, new Integer(2)); - assertTrue((Boolean) equalsImplementation.invoke(instance1, instance1)); - - // test difference in both fields - setterIntField1.invoke(instance2, new Integer(3)); - setterIntField2.invoke(instance2, new Integer(4)); - assertFalse((Boolean) equalsImplementation.invoke(instance1, instance2)); - - // test difference in second field - setterIntField1.invoke(instance2, new Integer(1)); - setterIntField2.invoke(instance2, new Integer(3)); - assertFalse((Boolean) equalsImplementation.invoke(instance1, instance2)); - - // test difference in first field - setterIntField1.invoke(instance2, new Integer(3)); - setterIntField2.invoke(instance2, new Integer(2)); - assertFalse((Boolean) equalsImplementation.invoke(instance1, instance2)); - - // test equality for initialized fields - setterIntField1.invoke(instance2, new Integer(1)); - setterIntField2.invoke(instance2, new Integer(2)); - assertTrue((Boolean) equalsImplementation.invoke(instance1, instance2)); - - if (null != prevClassLoader) { - ClassLoaderStack.setCurrentClassLoader(prevClassLoader); - } - } - - private static final String USERMAPPING_CLASS_AND_PACKAGE_NAME = - "usermapping.pkg.prefix.classname"; - - @Test - public void testUserMapping() throws IOException, ClassNotFoundException, - InstantiationException, IllegalAccessException, NoSuchMethodException, - InvocationTargetException { - - // Set the option strings in an "argv" to redirect our srcdir and bindir - String [] argv = { - "--bindir", JAR_GEN_DIR, - "--outdir", 
CODE_GEN_DIR, - "--class-name", USERMAPPING_CLASS_AND_PACKAGE_NAME, - "--map-column-java", "INTFIELD1=String", - }; - - File ormJarFile = runGenerationTest(argv, - USERMAPPING_CLASS_AND_PACKAGE_NAME); - ClassLoader prevClassLoader = ClassLoaderStack.addJarFile( - ormJarFile.getCanonicalPath(), - USERMAPPING_CLASS_AND_PACKAGE_NAME); - Class tableClass = Class.forName( - USERMAPPING_CLASS_AND_PACKAGE_NAME, - true, - Thread.currentThread().getContextClassLoader()); - - try { - Field intfield = tableClass.getDeclaredField("INTFIELD1"); - - assertEquals(String.class, intfield.getType()); - } catch (NoSuchFieldException ex) { - fail("Can't find field for INTFIELD1"); - } catch (SecurityException ex) { - fail("Can't find field for INTFIELD1"); - } - - if (null != prevClassLoader) { - ClassLoaderStack.setCurrentClassLoader(prevClassLoader); - } - } - - @Test - public void testBrokenUserMapping() throws Exception { - - String [] argv = { - "--bindir", JAR_GEN_DIR, - "--outdir", CODE_GEN_DIR, - "--class-name", USERMAPPING_CLASS_AND_PACKAGE_NAME, - "--map-column-java", "INTFIELD1=NotARealClass", - }; - - try { - runGenerationTest( - argv, - USERMAPPING_CLASS_AND_PACKAGE_NAME); - } catch(IllegalArgumentException e) { - return; - } - fail("we shouldn't successfully generate code"); - } - - /** - * A dummy manager that declares that it ORM is self managed. 
- */ - public static class DummyDirectManager extends DummyManager { - @Override - public boolean isORMFacilitySelfManaged() { - return true; - } - } - - @Test - public void testNoClassGeneration() throws Exception { - manager = new DummyDirectManager(); - String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - }; - - try { - options = new ImportTool().parseArguments(argv, - null, options, true); - } catch (Exception e) { - LOG.error("Could not parse options: " + e.toString()); - } - - CompilationManager compileMgr = new CompilationManager(options); - ClassWriter writer = new ClassWriter(options, manager, - HsqldbTestServer.getTableName(), compileMgr); - - writer.generate(); - - thrown.expect(Exception.class); - compileMgr.compile(); - } - - @Test(timeout = 25000) - public void testWideTableClassGeneration() throws Exception { - createWideTable(); - options = new SqoopOptions(HsqldbTestServer.getDbUrl(), WIDE_TABLE_NAME); - - // Set the option strings in an "argv" to redirect our srcdir and bindir. 
- String [] argv = { - "--bindir", - JAR_GEN_DIR, - "--outdir", - CODE_GEN_DIR, - }; - - File ormJarFile = runGenerationTest(argv, WIDE_TABLE_NAME, WIDE_TABLE_NAME); - - ClassLoader prevClassLoader = ClassLoaderStack.addJarFile(ormJarFile.getCanonicalPath(), - WIDE_TABLE_NAME); - Class tableClass = Class.forName(WIDE_TABLE_NAME, true, - Thread.currentThread().getContextClassLoader()); - - Object instance = tableClass.newInstance(); - Method setterMethod = tableClass.getMethod("setField", String.class, Object.class); - Random random = new Random(0); - for (int j = 0; j < WIDE_TABLE_ROW_COUNT; ++j) { - for (int i = 0; i < WIDE_TABLE_COLUMN_COUNT; ++i) { - setterMethod.invoke(instance, "INTFIELD" + i, random.nextInt()); - } - } - - if (null != prevClassLoader) { - ClassLoaderStack.setCurrentClassLoader(prevClassLoader); - } - } - - private void createWideTable() throws Exception { - try (Connection conn = testServer.getConnection(); Statement stmt = conn.createStatement();) { - stmt.executeUpdate("DROP TABLE \"" + WIDE_TABLE_NAME + "\" IF EXISTS"); - StringBuilder sb = new StringBuilder("CREATE TABLE \"" + WIDE_TABLE_NAME + "\" ("); - for (int i = 0; i < WIDE_TABLE_COLUMN_COUNT; ++i) { - sb.append("intField" + i + " INT"); - if (i < WIDE_TABLE_COLUMN_COUNT - 1) { - sb.append(","); - } else { - sb.append(")"); - } - } - stmt.executeUpdate(sb.toString()); - conn.commit(); - } - } -}
http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/orm/TestParseMethods.java ---------------------------------------------------------------------- diff --git a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java b/src/test/com/cloudera/sqoop/orm/TestParseMethods.java deleted file mode 100644 index 017fb9f..0000000 --- a/src/test/com/cloudera/sqoop/orm/TestParseMethods.java +++ /dev/null @@ -1,292 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.cloudera.sqoop.orm; - -import java.io.IOException; -import java.util.ArrayList; - -import com.cloudera.sqoop.testutil.*; -import org.apache.commons.cli.ParseException; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.NullWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.mapred.FileInputFormat; -import org.apache.hadoop.mapred.FileOutputFormat; -import org.apache.hadoop.mapred.JobClient; -import org.apache.hadoop.mapred.JobConf; -import org.apache.sqoop.tool.BaseSqoopTool; - -import com.cloudera.sqoop.SqoopOptions; -import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException; -import com.cloudera.sqoop.config.ConfigurationHelper; - -import com.cloudera.sqoop.testutil.ExplicitSetMapper; -import com.cloudera.sqoop.tool.ImportTool; -import com.cloudera.sqoop.util.ClassLoaderStack; -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -/** - * Test that the parse() methods generated in user SqoopRecord implementations - * work. - */ -public class TestParseMethods extends ImportJobTestCase { - - /** - * Create the argv to pass to Sqoop. - * @return the argv as an array of strings. - */ - private String [] getArgv(boolean includeHadoopFlags, String fieldTerminator, - String lineTerminator, String encloser, String escape, - boolean encloserRequired) { - - ArrayList<String> args = new ArrayList<String>(); - - if (includeHadoopFlags) { - CommonArgs.addHadoopFlags(args); - } - - args.add("--table"); - args.add(getTableName()); - args.add("--warehouse-dir"); - args.add(getWarehouseDir()); - args.add("--connect"); - args.add(HsqldbTestServer.getUrl()); - args.add("--as-textfile"); - args.add("--split-by"); - args.add("DATA_COL0"); // always split by first column. 
- args.add("--fields-terminated-by"); - args.add(fieldTerminator); - args.add("--lines-terminated-by"); - args.add(lineTerminator); - args.add("--escaped-by"); - args.add(escape); - if (encloserRequired) { - args.add("--enclosed-by"); - } else { - args.add("--optionally-enclosed-by"); - } - args.add(encloser); - args.add("--num-mappers"); - args.add("1"); - - return args.toArray(new String[0]); - } - - @Test - public void testTemporaryRootDirParse() throws Exception { - String customRoot = "customroot"; - String[] args = new String[] {"--"+BaseSqoopTool.TEMP_ROOTDIR_ARG, customRoot}; - - SqoopOptions opts = new ImportTool().parseArguments(args, null, null, true); - - assertEquals(customRoot, opts.getTempRootDir()); - } - - public void runParseTest(String fieldTerminator, String lineTerminator, - String encloser, String escape, boolean encloseRequired) - throws IOException { - - ClassLoader prevClassLoader = null; - - String [] argv = getArgv(true, fieldTerminator, lineTerminator, - encloser, escape, encloseRequired); - runImport(argv); - try { - String tableClassName = getTableName(); - - argv = getArgv(false, fieldTerminator, lineTerminator, encloser, escape, - encloseRequired); - SqoopOptions opts = new ImportTool().parseArguments(argv, null, null, - true); - - CompilationManager compileMgr = new CompilationManager(opts); - String jarFileName = compileMgr.getJarFilename(); - - // Make sure the user's class is loaded into our address space. - prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, - tableClassName); - - JobConf job = new JobConf(); - job.setJar(jarFileName); - - // Tell the job what class we're testing. - job.set(ReparseMapper.USER_TYPE_NAME_KEY, tableClassName); - - // use local mode in the same JVM. 
- ConfigurationHelper.setJobtrackerAddr(job, "local"); - if (!BaseSqoopTestCase.isOnPhysicalCluster()) { - job.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS); - } - String warehouseDir = getWarehouseDir(); - Path warehousePath = new Path(warehouseDir); - Path inputPath = new Path(warehousePath, getTableName()); - Path outputPath = new Path(warehousePath, getTableName() + "-out"); - - job.setMapperClass(ReparseMapper.class); - job.setNumReduceTasks(0); - FileInputFormat.addInputPath(job, inputPath); - FileOutputFormat.setOutputPath(job, outputPath); - - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(NullWritable.class); - - JobClient.runJob(job); - } catch (InvalidOptionsException ioe) { - fail(ioe.toString()); - } catch (ParseException pe) { - fail(pe.toString()); - } finally { - if (null != prevClassLoader) { - ClassLoaderStack.setCurrentClassLoader(prevClassLoader); - } - } - } - - @Test - public void testDefaults() throws IOException { - String [] types = { "INTEGER", "VARCHAR(32)", "INTEGER" }; - String [] vals = { "64", "'foo'", "128" }; - - createTableWithColTypes(types, vals); - runParseTest(",", "\\n", "\\\"", "\\", false); - } - - @Test - public void testRequiredEnclose() throws IOException { - String [] types = { "INTEGER", "VARCHAR(32)", "INTEGER" }; - String [] vals = { "64", "'foo'", "128" }; - - createTableWithColTypes(types, vals); - runParseTest(",", "\\n", "\\\"", "\\", true); - } - - @Test - public void testStringEscapes() throws IOException { - String [] types = { - "VARCHAR(32)", - "VARCHAR(32)", - "VARCHAR(32)", - "VARCHAR(32)", - "VARCHAR(32)", - }; - String [] vals = { - "'foo'", - "'foo,bar'", - "'foo''bar'", - "'foo\\bar'", - "'foo,bar''baz'", - }; - - createTableWithColTypes(types, vals); - runParseTest(",", "\\n", "\\\'", "\\", false); - } - - @Test - public void testNumericTypes() throws IOException { - String [] types = { - "INTEGER", - "REAL", - "FLOAT", - "DATE", - "TIME", - "TIMESTAMP", - "NUMERIC", - "BOOLEAN", - 
}; - String [] vals = { - "42", - "36.0", - "127.1", - "'2009-07-02'", - "'11:24:00'", - "'2009-08-13 20:32:00.1234567'", - "92104916282869291837672829102857271948687.287475322", - "true", - }; - - createTableWithColTypes(types, vals); - runParseTest(",", "\\n", "\\\'", "\\", false); - } - - @Test - public void testFieldSetter() throws IOException { - ClassLoader prevClassLoader = null; - - String [] types = { "VARCHAR(32)", "VARCHAR(32)" }; - String [] vals = { "'meep'", "'foo'" }; - createTableWithColTypes(types, vals); - - String [] argv = getArgv(true, ",", "\\n", "\\\'", "\\", false); - runImport(argv); - try { - String tableClassName = getTableName(); - - argv = getArgv(false, ",", "\\n", "\\\'", "\\", false); - SqoopOptions opts = new ImportTool().parseArguments(argv, null, null, - true); - - CompilationManager compileMgr = new CompilationManager(opts); - String jarFileName = compileMgr.getJarFilename(); - - // Make sure the user's class is loaded into our address space. - prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, - tableClassName); - - JobConf job = new JobConf(); - job.setJar(jarFileName); - - // Tell the job what class we're testing. - job.set(ExplicitSetMapper.USER_TYPE_NAME_KEY, tableClassName); - job.set(ExplicitSetMapper.SET_COL_KEY, BASE_COL_NAME + "0"); - job.set(ExplicitSetMapper.SET_VAL_KEY, "this-is-a-test"); - - // use local mode in the same JVM. 
- ConfigurationHelper.setJobtrackerAddr(job, "local"); - if (!BaseSqoopTestCase.isOnPhysicalCluster()) { - job.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS); - } - String warehouseDir = getWarehouseDir(); - Path warehousePath = new Path(warehouseDir); - Path inputPath = new Path(warehousePath, getTableName()); - Path outputPath = new Path(warehousePath, getTableName() + "-out"); - - job.setMapperClass(ExplicitSetMapper.class); - job.setNumReduceTasks(0); - FileInputFormat.addInputPath(job, inputPath); - FileOutputFormat.setOutputPath(job, outputPath); - - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(NullWritable.class); - - JobClient.runJob(job); - } catch (InvalidOptionsException ioe) { - fail(ioe.toString()); - } catch (ParseException pe) { - fail(pe.toString()); - } finally { - if (null != prevClassLoader) { - ClassLoaderStack.setCurrentClassLoader(prevClassLoader); - } - } - } -} - http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java ---------------------------------------------------------------------- diff --git a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java b/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java deleted file mode 100644 index b5d46d7..0000000 --- a/src/test/com/cloudera/sqoop/testutil/BaseSqoopTestCase.java +++ /dev/null @@ -1,624 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.cloudera.sqoop.testutil; - -import com.cloudera.sqoop.ConnFactory; -import com.cloudera.sqoop.SqoopOptions; -import com.cloudera.sqoop.manager.ConnManager; -import com.cloudera.sqoop.metastore.JobData; -import com.cloudera.sqoop.tool.ImportTool; -import com.google.common.collect.ObjectArrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.BasicConfigurator; -import org.apache.sqoop.SqoopJobDataPublisher; -import org.junit.After; -import org.junit.Before; - -import java.io.File; -import java.io.IOException; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Arrays; - -import static org.junit.Assert.fail; - -/** - * Class that implements common methods required for tests. 
- */ -public abstract class BaseSqoopTestCase { - - public static class DummyDataPublisher extends SqoopJobDataPublisher { - public static String hiveTable; - public static String storeTable; - public static String storeType; - public static String operation; - - @Override - public void publish(Data data) { - hiveTable = data.getHiveTable(); - storeTable = data.getStoreTable(); - storeType = data.getStoreType(); - operation = data.getOperation(); - } - } - - public static final Log LOG = LogFactory.getLog( - BaseSqoopTestCase.class.getName()); - - public static boolean isOnPhysicalCluster() { - return onPhysicalCluster; - } - private static void setOnPhysicalCluster(boolean value) { - onPhysicalCluster = value; - } - - private static boolean onPhysicalCluster = false; - - /** Base directory for all temporary data. */ - public static final String TEMP_BASE_DIR; - - /** Where to import table data to in the local filesystem for testing. */ - public static final String LOCAL_WAREHOUSE_DIR; - - // Initializer for the above - static { - String tmpDir = System.getProperty("test.build.data", "/tmp/"); - if (!tmpDir.endsWith(File.separator)) { - tmpDir = tmpDir + File.separator; - } - - TEMP_BASE_DIR = tmpDir; - LOCAL_WAREHOUSE_DIR = TEMP_BASE_DIR + "sqoop/warehouse"; - } - - // Used if a test manually sets the table name to be used. - private String curTableName; - - protected void setCurTableName(String curName) { - this.curTableName = curName; - } - - /** - * Because of how classloading works, we don't actually want to name - * all the tables the same thing -- they'll actually just use the same - * implementation of the Java class that was classloaded before. So we - * use this counter to uniquify table names. - */ - private static int tableNum = 0; - - /** When creating sequentially-identified tables, what prefix should - * be applied to these tables? 
- */ - protected String getTablePrefix() { - return "SQOOP_TABLE_"; - } - - protected String getTableName() { - if (null != curTableName) { - return curTableName; - } else { - return getTablePrefix() + Integer.toString(tableNum); - } - } - - protected String getWarehouseDir() { - return LOCAL_WAREHOUSE_DIR; - } - - private String [] colNames; - protected String [] getColNames() { - return colNames; - } - - protected void setColNames(String [] cols) { - if (null == cols) { - this.colNames = null; - } else { - this.colNames = Arrays.copyOf(cols, cols.length); - } - } - - protected HsqldbTestServer getTestServer() { - return testServer; - } - - protected ConnManager getManager() { - return manager; - } - - protected void setManager(ConnManager manager) { - this.manager = manager; - } - - /** - * @return a connection to the database under test. - */ - protected Connection getConnection() { - try { - return getTestServer().getConnection(); - } catch (SQLException sqlE) { - LOG.error("Could not get connection to test server: " + sqlE); - return null; - } - } - - // instance variables populated during setUp, used during tests - private HsqldbTestServer testServer; - private ConnManager manager; - - private static boolean isLog4jConfigured = false; - - protected void incrementTableNum() { - tableNum++; - } - - /** - * @return true if we need an in-memory database to run these tests. - */ - protected boolean useHsqldbTestServer() { - return true; - } - - /** - * @return the connect string to use for interacting with the database. - * If useHsqldbTestServer is false, you need to override this and provide - * a different connect string. - */ - protected String getConnectString() { - return HsqldbTestServer.getUrl(); - } - - /** - * @return a Configuration object used to configure tests. 
- */ - protected Configuration getConf() { - return new Configuration(); - } - - /** - * @return a new SqoopOptions customized for this particular test, but one - * which has not had any arguments parsed yet. - */ - protected SqoopOptions getSqoopOptions(Configuration conf) { - return new SqoopOptions(conf); - } - - @Before - public void setUp() { - // The assumption is that correct HADOOP configuration will have it set to - // hdfs://namenode - setOnPhysicalCluster( - !CommonArgs.LOCAL_FS.equals(System.getProperty( - CommonArgs.FS_DEFAULT_NAME))); - incrementTableNum(); - - if (!isLog4jConfigured) { - BasicConfigurator.configure(); - isLog4jConfigured = true; - LOG.info("Configured log4j with console appender."); - } - - if (useHsqldbTestServer()) { - testServer = new HsqldbTestServer(); - try { - testServer.resetServer(); - } catch (SQLException sqlE) { - LOG.error("Got SQLException: " + StringUtils.stringifyException(sqlE)); - fail("Got SQLException: " + StringUtils.stringifyException(sqlE)); - } catch (ClassNotFoundException cnfe) { - LOG.error("Could not find class for db driver: " - + StringUtils.stringifyException(cnfe)); - fail("Could not find class for db driver: " - + StringUtils.stringifyException(cnfe)); - } - - manager = testServer.getManager(); - } else { - Configuration conf = getConf(); - //Need to disable OraOop for existing tests - conf.set("oraoop.disabled", "true"); - SqoopOptions opts = getSqoopOptions(conf); - opts.setConnectString(getConnectString()); - opts.setTableName(getTableName()); - ConnFactory f = new ConnFactory(conf); - try { - this.manager = f.getManager(new JobData(opts, new ImportTool())); - } catch (IOException ioe) { - fail("IOException instantiating manager: " - + StringUtils.stringifyException(ioe)); - } - } - } - - private void guaranteeCleanWarehouse() { - if (isOnPhysicalCluster()) { - Path warehousePath = new Path(this.getWarehouseDir()); - try { - FileSystem fs = FileSystem.get(getConf()); - fs.delete(warehousePath, 
true); - } catch (IOException e) { - LOG.warn(e); - } - } else { - File s = new File(getWarehouseDir()); - if (!s.delete()) { - LOG.warn("Cannot delete " + s.getPath()); - } - } - } - - @After - public void tearDown() { - setCurTableName(null); // clear user-override table name. - - try { - if (null != manager) { - manager.close(); - manager = null; - } - } catch (SQLException sqlE) { - LOG.error("Got SQLException: " + StringUtils.stringifyException(sqlE)); - fail("Got SQLException: " + StringUtils.stringifyException(sqlE)); - } - guaranteeCleanWarehouse(); - } - - public static final String BASE_COL_NAME = "DATA_COL"; - - protected String getColName(int i) { - return BASE_COL_NAME + i; - } - - /** - * Drop a table if it already exists in the database. - * @param table the name of the table to drop. - * @throws SQLException if something goes wrong. - */ - protected void dropTableIfExists(String table) throws SQLException { - Connection conn = getManager().getConnection(); - PreparedStatement statement = conn.prepareStatement(dropTableIfExistsCommand(table), - ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - try { - statement.executeUpdate(); - conn.commit(); - } finally { - statement.close(); - } - } - - protected String dropTableIfExistsCommand(String table) { - return "DROP TABLE " + manager.escapeTableName(table) + " IF EXISTS"; - } - - protected void createTableWithColTypesAndNames(String[] colNames, - String[] colTypes, - String[] vals) { - createTableWithColTypesAndNames(getTableName(), colNames, colTypes, vals); - } - - /** - * Create a table with a set of columns with their names and add a row of values. 
- * @param newTableName The name of the new table - * @param colNames Column names - * @param colTypes the types of the columns to make - * @param vals the SQL text for each value to insert - */ - protected void createTableWithColTypesAndNames(String newTableName, - String[] colNames, - String[] colTypes, - String[] vals) { - assert colNames != null; - assert colTypes != null; - assert colNames.length == colTypes.length; - - Connection conn = null; - PreparedStatement statement = null; - String createTableStr = null; - String columnDefStr = ""; - - try { - try { - dropTableIfExists(newTableName); - - conn = getManager().getConnection(); - - for (int i = 0; i < colTypes.length; i++) { - columnDefStr += manager.escapeColName(colNames[i].toUpperCase()) + " " + colTypes[i]; - if (i < colTypes.length - 1) { - columnDefStr += ", "; - } - } - - createTableStr = "CREATE TABLE " + manager.escapeTableName(newTableName) + "(" + columnDefStr + ")"; - LOG.info("Creating table: " + createTableStr); - statement = conn.prepareStatement( - createTableStr, - ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - statement.executeUpdate(); - } catch (SQLException sqlException) { - fail("Could not create table: " - + StringUtils.stringifyException(sqlException)); - } finally { - if (null != statement) { - try { - statement.close(); - } catch (SQLException se) { - // Ignore exception on close. 
- } - - statement = null; - } - } - - for (int count=0; vals != null && count < vals.length/colTypes.length; - ++count ) { - String columnListStr = ""; - String valueListStr = ""; - for (int i = 0; i < colTypes.length; i++) { - columnListStr += manager.escapeColName(colNames[i].toUpperCase()); - valueListStr += vals[count * colTypes.length + i]; - if (i < colTypes.length - 1) { - columnListStr += ", "; - valueListStr += ", "; - } - } - try { - String insertValsStr = "INSERT INTO " + manager.escapeTableName(newTableName) + "(" + columnListStr + ")" - + " VALUES(" + valueListStr + ")"; - LOG.info("Inserting values: " + insertValsStr); - statement = conn.prepareStatement( - insertValsStr, - ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - statement.executeUpdate(); - } catch (SQLException sqlException) { - fail("Could not insert into table: " - + StringUtils.stringifyException(sqlException)); - } finally { - if (null != statement) { - try { - statement.close(); - } catch (SQLException se) { - // Ignore exception on close. - } - - statement = null; - } - } - } - - conn.commit(); - this.colNames = colNames; - } catch (SQLException se) { - if (null != conn) { - try { - conn.close(); - } catch (SQLException connSE) { - // Ignore exception on close. - } - } - fail("Could not create table: " + StringUtils.stringifyException(se)); - } - } - - /** - * insert into a table with a set of columns values for a given row. 
- * @param colTypes the types of the columns to make - * @param vals the SQL text for each value to insert - */ - protected void insertIntoTable(String[] colTypes, String[] vals) { - assert colNames != null; - assert colNames.length == vals.length; - - Connection conn = null; - PreparedStatement statement = null; - - String[] colNames = new String[vals.length]; - for( int i = 0; i < vals.length; i++) { - colNames[i] = BASE_COL_NAME + Integer.toString(i); - } - try { - conn = getManager().getConnection(); - for (int count=0; vals != null && count < vals.length/colTypes.length; - ++count ) { - String columnListStr = ""; - String valueListStr = ""; - for (int i = 0; i < colTypes.length; i++) { - columnListStr += manager.escapeColName(colNames[i].toUpperCase()); - valueListStr += vals[count * colTypes.length + i]; - if (i < colTypes.length - 1) { - columnListStr += ", "; - valueListStr += ", "; - } - } - try { - String insertValsStr = "INSERT INTO " + manager.escapeTableName(getTableName()) + "(" + columnListStr + ")" - + " VALUES(" + valueListStr + ")"; - LOG.info("Inserting values: " + insertValsStr); - statement = conn.prepareStatement( - insertValsStr, - ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - statement.executeUpdate(); - } catch (SQLException sqlException) { - fail("Could not insert into table: " - + StringUtils.stringifyException(sqlException)); - } finally { - if (null != statement) { - try { - statement.close(); - } catch (SQLException se) { - // Ignore exception on close. - } - - statement = null; - } - } - } - conn.commit(); - this.colNames = colNames; - } catch (SQLException se) { - if (null != conn) { - try { - conn.close(); - } catch (SQLException connSE) { - // Ignore exception on close. - } - } - fail("Could not create table: " + StringUtils.stringifyException(se)); - } - - } - - /** - * update a table with a set of columns values for a given row. 
- * @param colTypes the types of the columns to make - * @param vals the SQL text for each value to insert - */ - protected void updateTable(String[] colTypes, String[] vals) { - assert colNames != null; - assert colNames.length == vals.length; - - Connection conn = null; - PreparedStatement statement = null; - - String[] colNames = new String[vals.length]; - for( int i = 0; i < vals.length; i++) { - colNames[i] = BASE_COL_NAME + Integer.toString(i); - } - - try { - conn = getManager().getConnection(); - for (int count=0; vals != null && count < vals.length/colNames.length; - ++count ) { - String updateStr = ""; - for (int i = 1; i < colNames.length; i++) { - updateStr += manager.escapeColName(colNames[i].toUpperCase()) + " = "+vals[count * colNames.length + i]; - if (i < colNames.length - 1) { - updateStr += ", "; - } - } - updateStr += " WHERE "+colNames[0]+"="+vals[0]+""; - try { - String updateValsStr = "UPDATE " + manager.escapeTableName(getTableName()) + " SET " + updateStr; - LOG.info("updating values: " + updateValsStr); - statement = conn.prepareStatement( - updateValsStr, - ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); - statement.executeUpdate(); - } catch (SQLException sqlException) { - fail("Could not update table: " - + StringUtils.stringifyException(sqlException)); - } finally { - if (null != statement) { - try { - statement.close(); - } catch (SQLException se) { - // Ignore exception on close. - } - statement = null; - } - } - } - - conn.commit(); - this.colNames = colNames; - } catch (SQLException se) { - if (null != conn) { - try { - conn.close(); - } catch (SQLException connSE) { - // Ignore exception on close. - } - } - fail("Could not update table: " + StringUtils.stringifyException(se)); - } - } - - /** - * Create a table with a set of columns and add a row of values. 
- * @param colTypes the types of the columns to make - * @param vals the SQL text for each value to insert - */ - protected void createTableWithColTypes(String [] colTypes, String [] vals) { - String[] colNames = new String[colTypes.length]; - for( int i = 0; i < colTypes.length; i++) { - colNames[i] = BASE_COL_NAME + Integer.toString(i); - } - createTableWithColTypesAndNames(colNames, colTypes, vals); - } - - /** - * Create a table with a single column and put a data element in it. - * @param colType the type of the column to create - * @param val the value to insert (reformatted as a string) - */ - protected void createTableForColType(String colType, String val) { - String [] types = { colType }; - String [] vals = { val }; - - createTableWithColTypes(types, vals); - } - - protected Path getTablePath() { - Path warehousePath = new Path(getWarehouseDir()); - Path tablePath = new Path(warehousePath, getTableName()); - return tablePath; - } - - protected Path getDataFilePath() { - return new Path(getTablePath(), "part-m-00000"); - } - - protected void removeTableDir() { - File tableDirFile = new File(getTablePath().toString()); - if (tableDirFile.exists()) { - // Remove the directory where the table will be imported to, - // prior to running the MapReduce job. - if (!DirUtil.deleteDir(tableDirFile)) { - LOG.warn("Could not delete table directory: " - + tableDirFile.getAbsolutePath()); - } - } - } - - /** - * Create a new string array with 'moreEntries' appended to the 'entries' - * array. - * @param entries initial entries in the array - * @param moreEntries variable-length additional entries. - * @return an array containing entries with all of moreEntries appended. - */ - protected String [] newStrArray(String [] entries, String... 
moreEntries) { - if (null == moreEntries) { - return entries; - } - - if (null == entries) { - entries = new String[0]; - } - - return ObjectArrays.concat(entries, moreEntries, String.class); - } -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/testutil/CommonArgs.java ---------------------------------------------------------------------- diff --git a/src/test/com/cloudera/sqoop/testutil/CommonArgs.java b/src/test/com/cloudera/sqoop/testutil/CommonArgs.java deleted file mode 100644 index ea4dad5..0000000 --- a/src/test/com/cloudera/sqoop/testutil/CommonArgs.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.cloudera.sqoop.testutil; - -import java.util.List; - -/** - * Sets common arguments to Sqoop sub-instances for testing. 
- */ -public final class CommonArgs { - - private CommonArgs() { - } - - public static final String LOCAL_FS="file:///"; - // this key is deprecated past 0.21 - public static final String FS_DEFAULT_NAME="fs.defaultfs.name"; - public static final String FS_DEFAULTFS="fs.defaultFS"; - - public static String getJobtrackerAddress() { - return System.getProperty("mapreduce.jobtracker.address", "local"); - } - public static String getDefaultFS() { - return System.getProperty(FS_DEFAULT_NAME, LOCAL_FS); - } - /** - * Craft a list of arguments that are common to (virtually) - * all Sqoop programs. - */ - public static void addHadoopFlags(List<String> args) { - args.add("-D"); - args.add("mapreduce.jobtracker.address=local"); - args.add("-D"); - args.add("mapreduce.job.maps=1"); - args.add("-D"); - args.add("fs.defaultFS=file:///"); - args.add("-D"); - args.add("jobclient.completion.poll.interval=50"); - args.add("-D"); - args.add("jobclient.progress.monitor.poll.interval=50"); - } -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/testutil/DirUtil.java ---------------------------------------------------------------------- diff --git a/src/test/com/cloudera/sqoop/testutil/DirUtil.java b/src/test/com/cloudera/sqoop/testutil/DirUtil.java deleted file mode 100644 index c924e3c..0000000 --- a/src/test/com/cloudera/sqoop/testutil/DirUtil.java +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.cloudera.sqoop.testutil; - -import java.io.File; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -/** - * Misc directory operations. - */ -public final class DirUtil { - - private DirUtil() { - } - - public static final Log LOG = LogFactory.getLog(DirUtil.class.getName()); - - /** - * recursively delete a dir and its children. - * @param dir - * @return true on succesful removal of a dir - */ - public static boolean deleteDir(File dir) { - if (dir.isDirectory()) { - String [] children = dir.list(); - for (int i = 0; i < children.length; i++) { - File f = new File(dir, children[i]); - boolean success = deleteDir(f); - if (!success) { - LOG.warn("Could not delete " + f.getAbsolutePath()); - return false; - } - } - } - - // The directory is now empty so delete it too. - LOG.debug("Removing: " + dir); - return dir.delete(); - } - -} http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/testutil/ExplicitSetMapper.java ---------------------------------------------------------------------- diff --git a/src/test/com/cloudera/sqoop/testutil/ExplicitSetMapper.java b/src/test/com/cloudera/sqoop/testutil/ExplicitSetMapper.java deleted file mode 100644 index 344dc6e..0000000 --- a/src/test/com/cloudera/sqoop/testutil/ExplicitSetMapper.java +++ /dev/null @@ -1,109 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.cloudera.sqoop.testutil; - -import java.io.IOException; - -import java.util.Map; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.NullWritable; -import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapred.MapReduceBase; -import org.apache.hadoop.mapred.Mapper; -import org.apache.hadoop.mapred.OutputCollector; -import org.apache.hadoop.mapred.Reporter; -import com.cloudera.sqoop.lib.SqoopRecord; -import org.apache.hadoop.util.ReflectionUtils; - -/** - * Test harness mapper. Instantiate the user's specific type, explicitly - * set the value of a field with setField(), and read the field value - * back via the field map. Throw an IOException if it doesn't get set - * correctly. 
- */ -public class ExplicitSetMapper extends MapReduceBase - implements Mapper<LongWritable, Text, Text, NullWritable> { - - public static final Log LOG = LogFactory.getLog( - ExplicitSetMapper.class.getName()); - - public static final String USER_TYPE_NAME_KEY = "sqoop.user.class"; - public static final String SET_COL_KEY = "sqoop.explicit.set.col"; - public static final String SET_VAL_KEY = "sqoop.explicit.set.val"; - - private SqoopRecord userRecord; - private String setCol; - private String setVal; - - public void configure(JobConf job) { - String userTypeName = job.get(USER_TYPE_NAME_KEY); - if (null == userTypeName) { - throw new RuntimeException("Unconfigured parameter: " - + USER_TYPE_NAME_KEY); - } - - setCol = job.get(SET_COL_KEY); - setVal = job.get(SET_VAL_KEY); - - LOG.info("User type name set to " + userTypeName); - LOG.info("Will try to set col " + setCol + " to " + setVal); - - this.userRecord = null; - - try { - Configuration conf = new Configuration(); - Class userClass = Class.forName(userTypeName, true, - Thread.currentThread().getContextClassLoader()); - this.userRecord = - (SqoopRecord) ReflectionUtils.newInstance(userClass, conf); - } catch (ClassNotFoundException cnfe) { - // handled by the next block. - LOG.error("ClassNotFound exception: " + cnfe.toString()); - } catch (Exception e) { - LOG.error("Got an exception reflecting user class: " + e.toString()); - } - - if (null == this.userRecord) { - LOG.error("Could not instantiate user record of type " + userTypeName); - throw new RuntimeException("Could not instantiate user record of type " - + userTypeName); - } - } - - public void map(LongWritable key, Text val, - OutputCollector<Text, NullWritable> out, Reporter r) throws IOException { - - // Try to set the field. - userRecord.setField(setCol, setVal); - Map<String, Object> fieldVals = userRecord.getFieldMap(); - if (!fieldVals.get(setCol).equals(setVal)) { - throw new IOException("Could not set column value! 
Got back " - + fieldVals.get(setCol)); - } else { - LOG.info("Correctly changed value for col " + setCol + " to " + setVal); - } - } -} - http://git-wip-us.apache.org/repos/asf/sqoop/blob/6984a36c/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java ---------------------------------------------------------------------- diff --git a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java b/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java deleted file mode 100644 index 2433275..0000000 --- a/src/test/com/cloudera/sqoop/testutil/ExportJobTestCase.java +++ /dev/null @@ -1,366 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cloudera.sqoop.testutil;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;
import org.junit.Before;

import com.cloudera.sqoop.Sqoop;
import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.mapreduce.ExportOutputFormat;
import com.cloudera.sqoop.tool.ExportTool;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Class that implements common methods required for tests which export data
 * from HDFS to databases, to verify correct export.
 */
public abstract class ExportJobTestCase extends BaseSqoopTestCase {

  public static final Log LOG = LogFactory.getLog(
      ExportJobTestCase.class.getName());

  @Before
  public void setUp() {
    // start the server
    super.setUp();

    if (useHsqldbTestServer()) {
      // throw away any existing data that might be in the database.
      try {
        this.getTestServer().dropExistingSchema();
      } catch (SQLException sqlE) {
        fail(sqlE.toString());
      }
    }
  }

  /** @return the prefix used when naming export test tables. */
  protected String getTablePrefix() {
    return "EXPORT_TABLE_";
  }

  /**
   * @return the maximum rows to fold into an INSERT statement.
   * HSQLDB can only support the single-row INSERT syntax. Other databases
   * can support greater numbers of rows per statement.
   */
  protected int getMaxRowsPerStatement() {
    return 1;
  }

  /**
   * Create the argv to pass to Sqoop.
   * @param includeHadoopFlags if true, then include -D various.settings=values
   * @param rowsPerStmt number of rows to export in a single INSERT statement
   *    (capped by getMaxRowsPerStatement()).
   * @param statementsPerTx number of statements to use in a transaction.
   * @param additionalArgv optional extra arguments; "-D key=value" pairs are
   *    prepended as Hadoop flags, everything else is appended as tool args.
   * @return the argv as an array of strings.
   */
  protected String [] getArgv(boolean includeHadoopFlags,
      int rowsPerStmt, int statementsPerTx, String... additionalArgv) {
    ArrayList<String> args = new ArrayList<String>();

    if (includeHadoopFlags) {
      CommonArgs.addHadoopFlags(args);
      args.add("-D");
      // Clamp the requested batch size to what the target DB supports.
      int realRowsPerStmt = Math.min(rowsPerStmt, getMaxRowsPerStatement());
      if (realRowsPerStmt != rowsPerStmt) {
        LOG.warn("Rows per statement set to " + realRowsPerStmt
            + " by getMaxRowsPerStatement() limit.");
      }
      args.add(ExportOutputFormat.RECORDS_PER_STATEMENT_KEY + "="
          + realRowsPerStmt);
      args.add("-D");
      args.add(ExportOutputFormat.STATEMENTS_PER_TRANSACTION_KEY + "="
          + statementsPerTx);
    }

    // Any additional Hadoop flags (-D foo=bar) are prepended.
    if (null != additionalArgv) {
      boolean prevIsFlag = false;
      for (String arg : additionalArgv) {
        if (arg.equals("-D")) {
          args.add(arg);
          prevIsFlag = true;
        } else if (prevIsFlag) {
          args.add(arg);
          prevIsFlag = false;
        }
      }
    }

    boolean isHCatJob = false;
    // The sqoop-specific additional args are then added (the -D pairs were
    // consumed above, so they are skipped here).
    if (null != additionalArgv) {
      boolean prevIsFlag = false;
      for (String arg : additionalArgv) {
        if (arg.equals("-D")) {
          prevIsFlag = true;
          continue;
        } else if (prevIsFlag) {
          prevIsFlag = false;
          continue;
        } else {
          // normal argument.
          if (!isHCatJob && arg.equals("--hcatalog-table")) {
            isHCatJob = true;
          }
          args.add(arg);
        }
      }
    }

    if (usesSQLtable()) {
      args.add("--table");
      args.add(getTableName());
    }
    // Only add export-dir if hcatalog-table is not there in additional argv
    if (!isHCatJob) {
      args.add("--export-dir");
      args.add(getTablePath().toString());
    }
    args.add("--connect");
    args.add(getConnectString());
    args.add("--fields-terminated-by");
    args.add("\\t");
    args.add("--lines-terminated-by");
    args.add("\\n");
    args.add("-m");
    args.add("1");

    LOG.debug("args:");
    for (String a : args) {
      LOG.debug("  " + a);
    }

    return args.toArray(new String[0]);
  }

  /** @return true if the export reads from a SQL table (adds --table). */
  protected boolean usesSQLtable() {
    return true;
  }

  /** When exporting text columns, what should the text contain? */
  protected String getMsgPrefix() {
    return "textfield";
  }


  /** @return the minimum 'id' value in the table */
  protected int getMinRowId(Connection conn) throws SQLException {
    PreparedStatement statement = conn.prepareStatement(
        "SELECT MIN(\"ID\") FROM " + getTableName(),
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    int minVal = 0;
    try {
      ResultSet rs = statement.executeQuery();
      try {
        rs.next();
        minVal = rs.getInt(1);
      } finally {
        rs.close();
      }
    } finally {
      statement.close();
    }

    return minVal;
  }

  /** @return the maximum 'id' value in the table */
  protected int getMaxRowId(Connection conn) throws SQLException {
    PreparedStatement statement = conn.prepareStatement(
        "SELECT MAX(\"ID\") FROM " + getTableName(),
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    int maxVal = 0;
    try {
      ResultSet rs = statement.executeQuery();
      try {
        rs.next();
        maxVal = rs.getInt(1);
      } finally {
        rs.close();
      }
    } finally {
      statement.close();
    }

    return maxVal;
  }

  /**
   * Check that we got back the expected row set.
   * @param expectedNumRecords The number of records we expected to load
   * into the database.
   */
  protected void verifyExport(int expectedNumRecords)
      throws IOException, SQLException {
    Connection conn = getConnection();
    verifyExport(expectedNumRecords, conn);
  }

  /**
   * Check that we got back the expected row set: the row count matches,
   * ids run from 0 to expectedNumRecords - 1, and the MSG column of the
   * boundary rows contains getMsgPrefix() + id.
   * @param expectedNumRecords The number of records we expected to load
   * into the database.
   * @param conn the db connection to use.
   */
  protected void verifyExport(int expectedNumRecords, Connection conn)
      throws IOException, SQLException {
    LOG.info("Verifying export: " + getTableName());
    // Check that we got back the correct number of records.
    PreparedStatement statement = conn.prepareStatement(
        "SELECT COUNT(*) FROM " + getTableName(),
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    int actualNumRecords = 0;
    ResultSet rs = null;
    try {
      rs = statement.executeQuery();
      try {
        rs.next();
        actualNumRecords = rs.getInt(1);
      } finally {
        rs.close();
      }
    } finally {
      statement.close();
    }

    assertEquals("Got back unexpected row count", expectedNumRecords,
        actualNumRecords);

    if (expectedNumRecords == 0) {
      return; // Nothing more to verify.
    }

    // Check that we start with row 0.
    int minVal = getMinRowId(conn);
    assertEquals("Minimum row was not zero", 0, minVal);

    // Check that the last row we loaded is numRows - 1
    int maxVal = getMaxRowId(conn);
    assertEquals("Maximum row had invalid id", expectedNumRecords - 1, maxVal);

    // Check that the string values associated with these points match up.
    statement = conn.prepareStatement("SELECT \"MSG\" FROM " + getTableName()
        + " WHERE \"ID\" = " + minVal,
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    String minMsg = "";
    try {
      rs = statement.executeQuery();
      try {
        rs.next();
        minMsg = rs.getString(1);
      } finally {
        rs.close();
      }
    } finally {
      statement.close();
    }

    assertEquals("Invalid msg field for min value", getMsgPrefix() + minVal,
        minMsg);

    statement = conn.prepareStatement("SELECT \"MSG\" FROM " + getTableName()
        + " WHERE \"ID\" = " + maxVal,
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    String maxMsg = "";
    try {
      rs = statement.executeQuery();
      try {
        rs.next();
        maxMsg = rs.getString(1);
      } finally {
        rs.close();
      }
    } finally {
      statement.close();
    }

    // BUGFIX: this message previously said "min value" although the
    // assertion checks the row with the maximum id.
    assertEquals("Invalid msg field for max value", getMsgPrefix() + maxVal,
        maxMsg);
  }

  /**
   * Verify Export Method for checking updates: Issue [SQOOP-2846].
   * Checks that the MSG column of the first returned row equals
   * expectedValue.
   * @param expectedValue the value the MSG column is expected to hold.
   */
  protected void verifyExport(String expectedValue)
      throws IOException, SQLException {
    Connection conn = getConnection();
    LOG.info("Verifying export: " + getTableName());
    PreparedStatement statement = conn.prepareStatement(
        "SELECT MSG FROM " + getTableName(),
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    String actualValue = null;
    ResultSet rs = null;
    try {
      rs = statement.executeQuery();
      try {
        rs.next();
        actualValue = rs.getString(1);
      } finally {
        rs.close();
      }
    } finally {
      statement.close();
    }

    // BUGFIX: the message previously said "row count" although this
    // assertion compares a column value.
    assertEquals("Got back unexpected value", expectedValue, actualValue);
  }

  /**
   * Run a MapReduce-based export (using the argv provided to control
   * execution).
   * @return the list of generated jar filenames.
   * @throws IOException if the job exits with a non-zero status.
   */
  protected List<String> runExport(String [] argv) throws IOException {
    // run the tool through the normal entry-point.
    int ret;
    List<String> generatedJars = null;
    try {
      ExportTool exporter = new ExportTool();
      Configuration conf = getConf();
      // Need to disable OraOop for existing tests
      conf.set("oraoop.disabled", "true");
      SqoopOptions opts = getSqoopOptions(conf);
      Sqoop sqoop = new Sqoop(exporter, conf, opts);
      ret = Sqoop.runSqoop(sqoop, argv);
      generatedJars = exporter.getGeneratedJarFiles();
    } catch (Exception e) {
      LOG.error("Got exception running Sqoop: "
          + StringUtils.stringifyException(e));
      ret = 1;
    }

    // expect a successful return.
    if (0 != ret) {
      throw new IOException("Failure during job; return status " + ret);
    }

    return generatedJars;
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cloudera.sqoop.testutil;

import java.util.Arrays;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hsqldb.Server;

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.manager.HsqldbManager;

/**
 * Create a simple hsqldb server and schema to use for testing.
 */
public class HsqldbTestServer {
  public static final Log LOG =
      LogFactory.getLog(HsqldbTestServer.class.getName());

  // singleton server instance.
  private static Server server;

  // if -Dhsql.server.host hasn't been set to something like
  // hsql://localhost.localdomain/ a default in-mem DB will be used
  private static final String IN_MEM = "mem:";

  /** @return the configured server host, always with a trailing '/'. */
  public static String getServerHost() {
    String host = System.getProperty("hsql.server.host", IN_MEM);
    if (!host.endsWith("/")) { host += "/"; }
    return host;
  }

  // BUGFIX: this used to be IN_MEM.equals(getServerHost()), which was always
  // false because getServerHost() appends a trailing '/' ("mem:" -> "mem:/").
  // Use a prefix check so in-memory mode is actually detected.
  private static boolean inMemoryDB = getServerHost().startsWith(IN_MEM);

  // Database name can be altered too
  private static final String DATABASE_NAME =
      System.getProperty("hsql.database.name", "db1");

  // hsqldb always capitalizes table and column names
  private static final String DUMMY_TABLE_NAME = "TWOINTTABLE";
  private static final String [] TWO_INT_TABLE_FIELDS = {
    "INTFIELD1",
    "INTFIELD2",
  };

  private static final String EMPLOYEE_TABLE_NAME = "EMPLOYEES";

  private static final String DB_URL = "jdbc:hsqldb:"
      + getServerHost() + DATABASE_NAME;
  private static final String DRIVER_CLASS = "org.hsqldb.jdbcDriver";

  // all user-created HSQLDB tables are in the "PUBLIC" schema when connected
  // to a database.
  private static final String HSQLDB_SCHEMA_NAME = "PUBLIC";

  /** @return the schema that user tables are created in. */
  public static String getSchemaName() {
    return HSQLDB_SCHEMA_NAME;
  }

  /** @return a defensive copy of the TWOINTTABLE column names. */
  public static String [] getFieldNames() {
    return Arrays.copyOf(TWO_INT_TABLE_FIELDS, TWO_INT_TABLE_FIELDS.length);
  }

  public static String getUrl() {
    return DB_URL;
  }

  public static String getTableName() {
    return DUMMY_TABLE_NAME;
  }

  public static String getDatabaseName() {
    return DATABASE_NAME;
  }

  /**
   * start the server (no-op if the singleton is already running).
   */
  public void start() {
    if (null == server) {
      LOG.info("Starting new hsqldb server; database=" + DATABASE_NAME);
      String tmpDir = System.getProperty("test.build.data", "/tmp/");
      String dbLocation = tmpDir + "/sqoop/testdb.file";
      if (inMemoryDB) { dbLocation = IN_MEM; }
      server = new Server();

      server.setDatabaseName(0, DATABASE_NAME);
      server.putPropertiesFromString("database.0=" + dbLocation
          + ";no_system_exit=true");
      server.start();
    }
  }

  /** stop the server (no-op if it isn't running). */
  public void stop() {
    if (null == server) {
      return;
    }
    server.stop();
    server = null;
  }

  /** @return a connection with default credentials, or null if the
   * JDBC driver class is missing (error is logged). */
  public Connection getConnection() throws SQLException {
    return getConnection(null, null);
  }

  /**
   * @param user user to connect as, or null for the driver default.
   * @param password password, or null.
   * @return a connection with autocommit disabled, or null if the JDBC
   * driver class cannot be loaded.
   */
  public Connection getConnection(String user, String password)
      throws SQLException {
    try {
      Class.forName(DRIVER_CLASS);
    } catch (ClassNotFoundException cnfe) {
      // NOTE(review): returning null here pushes an NPE onto callers;
      // kept for compatibility with existing tests.
      LOG.error("Could not get connection; driver class not found: "
          + DRIVER_CLASS);
      return null;
    }

    Connection connection = DriverManager.getConnection(DB_URL, user, password);
    connection.setAutoCommit(false);
    return connection;
  }

  /**
   * Returns database URL for the server instance.
   * @return String representation of DB_URL
   */
  public static String getDbUrl() {
    return DB_URL;
  }

  /**
   * Create a table.
   */
  public void createSchema() throws SQLException {

    Connection connection = null;
    Statement st = null;

    try {
      connection = getConnection();

      st = connection.createStatement();
      st.executeUpdate("DROP TABLE \"" + DUMMY_TABLE_NAME + "\" IF EXISTS");
      st.executeUpdate("CREATE TABLE \"" + DUMMY_TABLE_NAME
          + "\"(intField1 INT, intField2 INT)");

      connection.commit();
    } finally {
      if (null != st) {
        st.close();
      }

      if (null != connection) {
        connection.close();
      }
    }
  }

  /**
   * @return the sum of the integers in the first column of TWOINTTABLE.
   */
  public static int getFirstColSum() {
    return 1 + 3 + 5 + 7;
  }

  /**
   * Fill the table with some data.
   */
  public void populateData() throws SQLException {

    Connection connection = null;
    Statement st = null;

    try {
      connection = getConnection();

      st = connection.createStatement();
      st.executeUpdate("INSERT INTO \"" + DUMMY_TABLE_NAME + "\" VALUES(1, 8)");
      st.executeUpdate("INSERT INTO \"" + DUMMY_TABLE_NAME + "\" VALUES(3, 6)");
      st.executeUpdate("INSERT INTO \"" + DUMMY_TABLE_NAME + "\" VALUES(5, 4)");
      st.executeUpdate("INSERT INTO \"" + DUMMY_TABLE_NAME + "\" VALUES(7, 2)");

      connection.commit();
    } finally {
      if (null != st) {
        st.close();
      }

      if (null != connection) {
        connection.close();
      }
    }
  }

  /** Create and populate the EMPLOYEES demo table. */
  public void createEmployeeDemo() throws SQLException, ClassNotFoundException {
    Class.forName(DRIVER_CLASS);

    Connection connection = null;
    Statement st = null;

    try {
      connection = getConnection();

      st = connection.createStatement();
      st.executeUpdate("DROP TABLE \"" + EMPLOYEE_TABLE_NAME + "\" IF EXISTS");
      st.executeUpdate("CREATE TABLE \"" + EMPLOYEE_TABLE_NAME
          + "\"(emp_id INT NOT NULL PRIMARY KEY, name VARCHAR(64))");

      st.executeUpdate("INSERT INTO \"" + EMPLOYEE_TABLE_NAME
          + "\" VALUES(1, 'Aaron')");
      st.executeUpdate("INSERT INTO \"" + EMPLOYEE_TABLE_NAME
          + "\" VALUES(2, 'Joe')");
      st.executeUpdate("INSERT INTO \"" + EMPLOYEE_TABLE_NAME
          + "\" VALUES(3, 'Jim')");
      st.executeUpdate("INSERT INTO \"" + EMPLOYEE_TABLE_NAME
          + "\" VALUES(4, 'Lisa')");

      connection.commit();
    } finally {
      if (null != st) {
        st.close();
      }

      if (null != connection) {
        connection.close();
      }
    }
  }

  /**
   * Delete any existing tables.
   */
  public void dropExistingSchema() throws SQLException {
    ConnManager mgr = getManager();
    String [] tables = mgr.listTables();
    if (null != tables) {
      Connection conn = mgr.getConnection();
      for (String table : tables) {
        Statement s = conn.createStatement();
        try {
          s.executeUpdate("DROP TABLE \"" + table + "\"");
          conn.commit();
        } finally {
          s.close();
        }
      }
    }
  }

  /**
   * Creates an hsqldb server, fills it with tables and data.
   */
  public void resetServer() throws ClassNotFoundException, SQLException {
    start();
    dropExistingSchema();
    createSchema();
    populateData();
  }

  public SqoopOptions getSqoopOptions() {
    return new SqoopOptions(HsqldbTestServer.getUrl(),
        HsqldbTestServer.getTableName());
  }

  public ConnManager getManager() {
    return new HsqldbManager(getSqoopOptions());
  }

  /**
   * Create a new admin user.
   * NOTE(review): the DDL is built with String.format; fine for test-only
   * inputs, but do not pass untrusted usernames/passwords here.
   */
  public void createNewUser(String username, String password)
      throws SQLException {
    try (Connection connection = getConnection();
         Statement statement = connection.createStatement()) {
      statement.executeUpdate(
          String.format("CREATE USER %s PASSWORD %s ADMIN", username, password));
    }
  }

  /** Change the password of an existing user (test-only helper; see
   * the injection note on createNewUser). */
  public void changePasswordForUser(String username, String newPassword)
      throws SQLException {
    try (Connection connection = getConnection();
         Statement statement = connection.createStatement()) {
      statement.executeUpdate(
          String.format("ALTER USER %s SET PASSWORD %s", username, newPassword));
    }
  }

}
