Added: 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
URL: 
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java?rev=778646&view=auto
==============================================================================
--- 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
 (added)
+++ 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/TestSqlManager.java
 Tue May 26 10:29:38 2009
@@ -0,0 +1,220 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.manager;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
+
+/**
+ * Test methods of the generic SqlManager implementation.
+ */
+public class TestSqlManager extends TestCase {
+
+  public static final Log LOG = LogFactory.getLog(TestSqlManager.class.getName());
+
+  /** the name of a table that doesn't exist. */
+  static final String MISSING_TABLE = "MISSING_TABLE";
+
+  // instance variables populated during setUp, used during tests
+  private HsqldbTestServer testServer;
+  private ConnManager manager;
+
+  @Before
+  public void setUp() {
+    testServer = new HsqldbTestServer();
+    try {
+      testServer.resetServer();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    } catch (ClassNotFoundException cnfe) {
+      LOG.error("Could not find class for db driver: " + cnfe.toString());
+      fail("Could not find class for db driver: " + cnfe.toString());
+    }
+
+    manager = testServer.getManager();
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      manager.close();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  @Test
+  public void testListColNames() {
+    String [] colNames = manager.getColumnNames(HsqldbTestServer.getTableName());
+    assertNotNull("manager returned no colname list", colNames);
+    assertEquals("Table list should be length 2", 2, colNames.length);
+    String [] knownFields = HsqldbTestServer.getFieldNames();
+    for (int i = 0; i < colNames.length; i++) {
+      assertEquals(knownFields[i], colNames[i]);
+    }
+  }
+
+  @Test
+  public void testListColTypes() {
+    Map<String, Integer> types = manager.getColumnTypes(HsqldbTestServer.getTableName());
+
+    assertNotNull("manager returned no types map", types);
+    assertEquals("Map should be size=2", 2, types.size());
+    assertEquals(types.get("INTFIELD1").intValue(), Types.INTEGER);
+    assertEquals(types.get("INTFIELD2").intValue(), Types.INTEGER);
+  }
+
+  @Test
+  public void testMissingTableColNames() {
+    String [] colNames = manager.getColumnNames(MISSING_TABLE);
+    assertNull("No column names should be returned for missing table", 
colNames);
+  }
+
+  @Test
+  public void testMissingTableColTypes() {
+    Map<String, Integer> colTypes = manager.getColumnTypes(MISSING_TABLE);
+    assertNull("No column types should be returned for missing table", 
colTypes);
+  }
+
+  @Test
+  public void testListTables() {
+    String [] tables = manager.listTables();
+    for (String table : tables) {
+      System.err.println("Got table: " + table);
+    }
+    assertNotNull("manager returned no table list", tables);
+    assertEquals("Table list should be length 1", 1, tables.length);
+    assertEquals(HsqldbTestServer.getTableName(), tables[0]);
+  }
+
+  // constants related to testReadTable()
+  final static int EXPECTED_NUM_ROWS = 4;
+  final static int EXPECTED_COL1_SUM = 16;
+  final static int EXPECTED_COL2_SUM = 20;
+
+  @Test
+  public void testReadTable() {
+    try {
+      ResultSet results = manager.readTable(HsqldbTestServer.getTableName(),
+          HsqldbTestServer.getFieldNames());
+
+      assertNotNull("ResultSet from readTable() is null!", results);
+
+      ResultSetMetaData metaData = results.getMetaData();
+      assertNotNull("ResultSetMetadata is null in readTable()", metaData);
+
+      // ensure that we get the correct number of columns back
+      assertEquals("Number of returned columns was unexpected!", 
metaData.getColumnCount(),
+          HsqldbTestServer.getFieldNames().length);
+
+      // should get back 4 rows. As populated by HsqldbTestServer, they are:
+      // 1 8
+      // 3 6
+      // 5 4
+      // 7 2
+      // .. so while order isn't guaranteed, we should get back 16 on the left and 20 on the right.
+      int sumCol1 = 0, sumCol2 = 0, rowCount = 0;
+      while (results.next()) {
+        rowCount++;
+        sumCol1 += results.getInt(1);
+        sumCol2 += results.getInt(2);
+      }
+
+      assertEquals("Expected 4 rows back", EXPECTED_NUM_ROWS, rowCount);
+      assertEquals("Expected left sum of 16", EXPECTED_COL1_SUM, sumCol1);
+      assertEquals("Expected right sum of 20", EXPECTED_COL2_SUM, sumCol2);
+    } catch (SQLException sqlException) {
+      fail("SQL Exception: " + sqlException.toString());
+    }
+  }
+
+  @Test
+  public void testReadMissingTable() {
+    try {
+      String [] colNames = { "*" };
+      ResultSet results = manager.readTable(MISSING_TABLE, colNames);
+      assertNull("Expected null resultset from readTable(MISSING_TABLE)", 
results);
+    } catch (SQLException sqlException) {
+      // we actually expect this. pass.
+    }
+  }
+
+  @Test
+  public void getPrimaryKeyFromMissingTable() {
+    String primaryKey = manager.getPrimaryKey(MISSING_TABLE);
+    assertNull("Expected null pkey for missing table", primaryKey);
+  }
+
+  @Test
+  public void getPrimaryKeyFromTableWithoutKey() {
+    String primaryKey = manager.getPrimaryKey(HsqldbTestServer.getTableName());
+    assertNull("Expected null pkey for table without key", primaryKey);
+  }
+
+  // constants for getPrimaryKeyFromTable()
+  static final String TABLE_WITH_KEY = "TABLE_WITH_KEY";
+  static final String KEY_FIELD_NAME = "KEYFIELD";
+
+  @Test
+  public void getPrimaryKeyFromTable() {
+    // first, create a table with a primary key
+    Connection conn = null;
+    try {
+      conn = testServer.getConnection();
+      PreparedStatement statement = conn.prepareStatement(
+          "CREATE TABLE " + TABLE_WITH_KEY + "(" + KEY_FIELD_NAME
+          + " INT NOT NULL PRIMARY KEY, foo INT)",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+    } catch (SQLException sqlException) {
+      fail("Could not create table with primary key: " + 
sqlException.toString());
+    } finally {
+      if (null != conn) {
+        try {
+          conn.close();
+        } catch (SQLException sqlE) {
+          LOG.warn("Got SQLException during close: " + sqlE.toString());
+        }
+      }
+    }
+
+    String primaryKey = manager.getPrimaryKey(TABLE_WITH_KEY);
+    assertEquals("Expected null pkey for table without key", primaryKey, 
KEY_FIELD_NAME);
+  }
+}
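
The tests above exercise the core ConnManager metadata API: listTables(),
getColumnNames(), getColumnTypes(), readTable(), and getPrimaryKey(). As a
minimal sketch, assuming only the HsqldbTestServer and ConnManager behavior
shown in this commit, the same API can be driven directly:

    import java.sql.ResultSet;

    import org.apache.hadoop.sqoop.manager.ConnManager;
    import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;

    public class ConnManagerSketch {
      public static void main(String[] args) throws Exception {
        HsqldbTestServer server = new HsqldbTestServer();
        server.resetServer(); // start hsqldb; drop, create, and populate TWOINTTABLE

        ConnManager manager = server.getManager();
        try {
          // Enumerate tables and their columns, as testListTables() and
          // testListColNames() do.
          for (String table : manager.listTables()) {
            for (String col : manager.getColumnNames(table)) {
              System.out.println(table + "." + col);
            }
          }

          // Read the rows back, as testReadTable() does.
          ResultSet results = manager.readTable(HsqldbTestServer.getTableName(),
              HsqldbTestServer.getFieldNames());
          while (results.next()) {
            System.out.println(results.getInt(1) + "\t" + results.getInt(2));
          }
        } finally {
          manager.close();
        }
      }
    }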

Added: 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java
URL: 
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java?rev=778646&view=auto
==============================================================================
--- 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java
 (added)
+++ 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java
 Tue May 26 10:29:38 2009
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.orm;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.SQLException;
+
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.manager.ConnManager;
+import org.apache.hadoop.sqoop.testutil.DirUtil;
+import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;
+import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;
+
+/**
+ * Test that the ClassWriter generates Java classes from the given table,
+ * and that those classes compile.
+ */
+public class TestClassWriter extends TestCase {
+
+  public static final Log LOG =
+      LogFactory.getLog(TestClassWriter.class.getName());
+
+  // instance variables populated during setUp, used during tests
+  private HsqldbTestServer testServer;
+  private ConnManager manager;
+  private ImportOptions options;
+
+  @Before
+  public void setUp() {
+    testServer = new HsqldbTestServer();
+    try {
+      testServer.resetServer();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    } catch (ClassNotFoundException cnfe) {
+      LOG.error("Could not find class for db driver: " + cnfe.toString());
+      fail("Could not find class for db driver: " + cnfe.toString());
+    }
+
+    manager = testServer.getManager();
+    options = testServer.getImportOptions();
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      manager.close();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  static final String CODE_GEN_DIR = ImportJobTestCase.TEMP_BASE_DIR + "sqoop/test/codegen";
+  static final String JAR_GEN_DIR = ImportJobTestCase.TEMP_BASE_DIR + "sqoop/test/jargen";
+
+  /**
+   * Test that we can generate code, and that --outdir and --bindir redirect
+   * the generated sources and compiled classes.
+   */
+  @Test
+  public void testCodeGen() {
+
+    // sanity check: make sure we're in a tmp dir before we blow anything away.
+    assertTrue("Test generates code in non-tmp dir!",
+        CODE_GEN_DIR.startsWith(ImportJobTestCase.TEMP_BASE_DIR));
+    assertTrue("Test generates jars in non-tmp dir!",
+        JAR_GEN_DIR.startsWith(ImportJobTestCase.TEMP_BASE_DIR));
+
+    // start out by removing these directories ahead of time
+    // to ensure that this is truly generating the code.
+    File codeGenDirFile = new File(CODE_GEN_DIR);
+    File classGenDirFile = new File(JAR_GEN_DIR);
+
+    if (codeGenDirFile.exists()) {
+      DirUtil.deleteDir(codeGenDirFile);
+    }
+
+    if (classGenDirFile.exists()) {
+      DirUtil.deleteDir(classGenDirFile);
+    }
+
+    // Set the option strings in an "argv" to redirect our srcdir and bindir
+    String [] argv = {
+        "--bindir",
+        JAR_GEN_DIR,
+        "--outdir",
+        CODE_GEN_DIR
+    };
+
+    try {
+      options.parse(argv);
+    } catch (InvalidOptionsException ioe) {
+      LOG.error("Could not parse options: " + ioe.toString());
+      fail("Could not parse options: " + ioe.toString());
+    }
+
+    CompilationManager compileMgr = new CompilationManager(options);
+    ClassWriter writer = new ClassWriter(options, manager,
+        HsqldbTestServer.getTableName(), compileMgr);
+
+    try {
+      writer.generate();
+      compileMgr.compile();
+      compileMgr.jar();
+    } catch (IOException ioe) {
+      LOG.error("Got IOException: " + ioe.toString());
+      fail("Got IOException: " + ioe.toString());
+    }
+
+    File tableFile = new File(codeGenDirFile, HsqldbTestServer.getTableName() + ".java");
+    assertTrue("Cannot find generated source file for table!", tableFile.exists());
+
+    File tableClassFile = new File(classGenDirFile, HsqldbTestServer.getTableName() + ".class");
+    assertTrue("Cannot find generated class file for table!", tableClassFile.exists());
+
+    File jarFile = new File(compileMgr.getJarFilename());
+    assertTrue("Cannot find compiled jar", jarFile.exists());
+  }
+}
+
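
For reference, testCodeGen() above walks the whole code-generation pipeline:
parse options, generate a Java class for the table, compile it, and package
it into a jar. A condensed sketch under the same assumptions (the --outdir
and --bindir paths here are placeholders):

    import org.apache.hadoop.sqoop.ImportOptions;
    import org.apache.hadoop.sqoop.manager.ConnManager;
    import org.apache.hadoop.sqoop.orm.ClassWriter;
    import org.apache.hadoop.sqoop.orm.CompilationManager;
    import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;

    public class CodeGenSketch {
      public static void main(String[] args) throws Exception {
        HsqldbTestServer server = new HsqldbTestServer();
        server.resetServer();
        ConnManager manager = server.getManager();

        // Redirect generated .java sources and compiled classes/jars.
        ImportOptions options = server.getImportOptions();
        options.parse(new String[] {
            "--outdir", "/tmp/sqoop/codegen",  // placeholder source dir
            "--bindir", "/tmp/sqoop/jargen"    // placeholder binary dir
        });

        CompilationManager compileMgr = new CompilationManager(options);
        ClassWriter writer = new ClassWriter(options, manager,
            HsqldbTestServer.getTableName(), compileMgr);
        writer.generate();      // emit <TABLE>.java under --outdir
        compileMgr.compile();   // compile the sources into --bindir
        compileMgr.jar();       // bundle the classes into a jar
        System.out.println("Wrote " + compileMgr.getJarFilename());

        manager.close();
      }
    }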

Added: 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/DirUtil.java
URL: 
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/DirUtil.java?rev=778646&view=auto
==============================================================================
--- 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/DirUtil.java
 (added)
+++ 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/DirUtil.java
 Tue May 26 10:29:38 2009
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.testutil;
+
+import java.io.File;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/**
+ * Miscellaneous directory operations for tests.
+ */
+public final class DirUtil {
+
+  public static final Log LOG = LogFactory.getLog(DirUtil.class.getName());
+
+  /**
+   * Recursively delete a dir and its children.
+   * @param dir the directory to delete
+   * @return true on successful removal of the dir
+   */
+  public static boolean deleteDir(File dir) {
+    if (dir.isDirectory()) {
+      String [] children = dir.list();
+      for (int i = 0; i < children.length; i++) {
+        File f = new File(dir, children[i]);
+        boolean success = deleteDir(f);
+        if (!success) {
+          LOG.warn("Could not delete " + f.getAbsolutePath());
+          return false;
+        }
+      }
+    }
+
+    // The directory is now empty so delete it too.
+    return dir.delete();
+  }
+
+}
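
A short usage sketch for DirUtil.deleteDir(), mirroring the guarded-delete
pattern in TestClassWriter above (the temp path is a placeholder):

    import java.io.File;

    import org.apache.hadoop.sqoop.testutil.DirUtil;

    public class DirUtilSketch {
      public static void main(String[] args) {
        File genDir = new File("/tmp/sqoop/test/codegen"); // placeholder path

        // Sanity check before recursively deleting anything: only remove
        // directories that live under a known temporary root.
        if (genDir.getPath().startsWith("/tmp/") && genDir.exists()
            && !DirUtil.deleteDir(genDir)) {
          System.err.println("Could not delete " + genDir.getAbsolutePath());
        }
      }
    }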

Added: 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java
URL: 
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java?rev=778646&view=auto
==============================================================================
--- 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java
 (added)
+++ 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java
 Tue May 26 10:29:38 2009
@@ -0,0 +1,243 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.testutil;
+
+import java.util.Arrays;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.hsqldb.Server;
+
+import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.manager.ConnManager;
+import org.apache.hadoop.sqoop.manager.HsqldbManager;
+
+/**
+ * Create a simple hsqldb server and schema to use for testing.
+ */
+public class HsqldbTestServer {
+  public static final Log LOG =
+    LogFactory.getLog(HsqldbTestServer.class.getName());
+
+  // singleton server instance.
+  private static Server server;
+
+  // When we create databases in HSqlDb, where do we put the files?
+  private static final String DATABASE_DATA_DIR = "./hsqldb-data";
+
+  private static final String DATABASE_NAME = "db1";
+
+  // hsqldb always capitalizes table and column names
+  private static final String DUMMY_TABLE_NAME = "TWOINTTABLE";
+  private static final String [] TWO_INT_TABLE_FIELDS = {"INTFIELD1", "INTFIELD2"};
+
+  private static final String EMPLOYEE_TABLE_NAME = "EMPLOYEES";
+
+  private static final String DB_URL = "jdbc:hsqldb:hsql://localhost/" + DATABASE_NAME;
+  private static final String DRIVER_CLASS = "org.hsqldb.jdbcDriver";
+
+  // all user-created HSQLDB tables are in the "PUBLIC" schema when connected to a database.
+  private static final String HSQLDB_SCHEMA_NAME = "PUBLIC";
+
+  public static String getSchemaName() {
+    return HSQLDB_SCHEMA_NAME;
+  }
+
+  public static String [] getFieldNames() {
+    return Arrays.copyOf(TWO_INT_TABLE_FIELDS, TWO_INT_TABLE_FIELDS.length);
+  }
+
+  public static String getUrl() {
+    return DB_URL;
+  }
+
+  public static String getTableName() {
+    return DUMMY_TABLE_NAME;
+  }
+
+  public static String getDatabaseName() {
+    return DATABASE_NAME;
+  }
+
+  /**
+   * Start the server if it is not already running.
+   */
+  public void start() {
+    if (null == server) {
+      LOG.info("Starting new hsqldb server; database=" + DATABASE_NAME + "; 
dir="
+          + DATABASE_DATA_DIR);
+      server = new Server();
+      server.setDatabasePath(0, DATABASE_DATA_DIR);
+      server.setDatabaseName(0, DATABASE_NAME);
+      server.start();
+    }
+  }
+
+  public Connection getConnection() throws SQLException {
+    try {
+      Class.forName(DRIVER_CLASS);
+    } catch (ClassNotFoundException cnfe) {
+      LOG.error("Could not get connection; driver class not found: " + 
DRIVER_CLASS);
+      return null;
+    }
+
+    Connection connection = DriverManager.getConnection(DB_URL);
+    connection.setAutoCommit(false);
+    return connection;
+  }
+
+  /**
+   * Create the test table, dropping any existing copy.
+   */
+  public void createSchema() throws SQLException {
+
+    Connection connection = null;
+    Statement st = null;
+
+    try {
+      connection = getConnection();
+
+      st = connection.createStatement();
+      st.executeUpdate("DROP TABLE " + DUMMY_TABLE_NAME + " IF EXISTS");
+      st.executeUpdate("CREATE TABLE " + DUMMY_TABLE_NAME + "(intField1 INT, 
intField2 INT)");
+
+      connection.commit();
+    } finally {
+      if (null != st) {
+        st.close();
+      }
+
+      if (null != connection) {
+        connection.close();
+      }
+    }
+  }
+
+  /**
+   * @return the sum of the integers in the first column of TWOINTTABLE.
+   */
+  public static int getFirstColSum() {
+    return 1 + 3 + 5 + 7;
+  }
+
+  /**
+   * Fill the table with some data
+   */
+  public void populateData() throws SQLException {
+
+    Connection connection = null;
+    Statement st = null;
+
+    try {
+      connection = getConnection();
+
+      st = connection.createStatement();
+      st.executeUpdate("INSERT INTO " + DUMMY_TABLE_NAME + " VALUES(1, 8)");
+      st.executeUpdate("INSERT INTO " + DUMMY_TABLE_NAME + " VALUES(3, 6)");
+      st.executeUpdate("INSERT INTO " + DUMMY_TABLE_NAME + " VALUES(5, 4)");
+      st.executeUpdate("INSERT INTO " + DUMMY_TABLE_NAME + " VALUES(7, 2)");
+
+      connection.commit();
+    } finally {
+      if (null != st) {
+        st.close();
+      }
+
+      if (null != connection) {
+        connection.close();
+      }
+    }
+  }
+
+  public void createEmployeeDemo() throws SQLException, ClassNotFoundException {
+    Class.forName(DRIVER_CLASS);
+
+    Connection connection = null;
+    Statement st = null;
+
+    try {
+      connection = getConnection();
+
+      st = connection.createStatement();
+      st.executeUpdate("DROP TABLE " + EMPLOYEE_TABLE_NAME + " IF EXISTS");
+      st.executeUpdate("CREATE TABLE " + EMPLOYEE_TABLE_NAME
+          + "(emp_id INT NOT NULL PRIMARY KEY, name VARCHAR(64))");
+
+      st.executeUpdate("INSERT INTO " + EMPLOYEE_TABLE_NAME + " VALUES(1, 
'Aaron')");
+      st.executeUpdate("INSERT INTO " + EMPLOYEE_TABLE_NAME + " VALUES(2, 
'Joe')");
+      st.executeUpdate("INSERT INTO " + EMPLOYEE_TABLE_NAME + " VALUES(3, 
'Jim')");
+      st.executeUpdate("INSERT INTO " + EMPLOYEE_TABLE_NAME + " VALUES(4, 
'Lisa')");
+
+      connection.commit();
+    } finally {
+      if (null != st) {
+        st.close();
+      }
+
+      if (null != connection) {
+        connection.close();
+      }
+    }
+  }
+
+  /**
+   * Delete any existing tables.
+   */
+  public void dropExistingSchema() throws SQLException {
+    ConnManager mgr = getManager();
+    String [] tables = mgr.listTables();
+    if (null != tables) {
+      Connection conn = mgr.getConnection();
+      for (String table : tables) {
+        Statement s = conn.createStatement();
+        s.executeUpdate("DROP TABLE " + table);
+        conn.commit();
+        s.close();
+      }
+    }
+  }
+
+  /**
+   * Creates an hsqldb server, fills it with tables and data.
+   */
+  public void resetServer() throws ClassNotFoundException, SQLException {
+    start();
+    dropExistingSchema();
+    createSchema();
+    populateData();
+  }
+
+  public ImportOptions getImportOptions() {
+    return new ImportOptions(HsqldbTestServer.getUrl(),
+        HsqldbTestServer.getTableName());
+  }
+
+  public ConnManager getManager() {
+    return new HsqldbManager(getImportOptions());
+  }
+}
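
Plain JDBC also works against the test server once it is running; a minimal
sketch, assuming the driver class and URL defined above:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.Statement;

    import org.apache.hadoop.sqoop.testutil.HsqldbTestServer;

    public class HsqldbQuerySketch {
      public static void main(String[] args) throws Exception {
        HsqldbTestServer server = new HsqldbTestServer();
        server.resetServer(); // start the server; create and populate TWOINTTABLE

        Connection conn = server.getConnection();
        try {
          Statement st = conn.createStatement();
          ResultSet rs = st.executeQuery(
              "SELECT SUM(INTFIELD1) FROM " + HsqldbTestServer.getTableName());
          rs.next();
          // With rows (1,8), (3,6), (5,4), (7,2), this prints 16 --
          // the same value as HsqldbTestServer.getFirstColSum().
          System.out.println("First column sum: " + rs.getInt(1));
          rs.close();
          st.close();
        } finally {
          conn.close();
        }
      }
    }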

Added: 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
URL: 
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java?rev=778646&view=auto
==============================================================================
--- 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
 (added)
+++ 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java
 Tue May 26 10:29:38 2009
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.testutil;
+
+import java.io.File;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.log4j.BasicConfigurator;
+import org.junit.After;
+import org.junit.Before;
+
+import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.Sqoop;
+import org.apache.hadoop.sqoop.ImportOptions.InvalidOptionsException;
+import org.apache.hadoop.sqoop.manager.ConnManager;
+import org.apache.hadoop.sqoop.orm.CompilationManager;
+import org.apache.hadoop.sqoop.util.ClassLoaderStack;
+
+import junit.framework.TestCase;
+
+/**
+ * Class that implements common methods required for tests which import data
+ * from SQL into HDFS and verify correct import.
+ */
+public class ImportJobTestCase extends TestCase {
+
+  public static final Log LOG = LogFactory.getLog(ImportJobTestCase.class.getName());
+
+  /** Base directory for all temporary data */
+  public static final String TEMP_BASE_DIR;
+
+  /** Where to import table data to in the local filesystem for testing */
+  public static final String LOCAL_WAREHOUSE_DIR;
+
+  // Initializer for the above
+  static {
+    String tmpDir = System.getProperty("test.build.data", "/tmp/");
+    if (!tmpDir.endsWith(File.separator)) {
+      tmpDir = tmpDir + File.separator;
+    }
+
+    TEMP_BASE_DIR = tmpDir;
+    LOCAL_WAREHOUSE_DIR = TEMP_BASE_DIR + "sqoop/warehouse";
+  }
+
+  /**
+   * Because of how classloading works, we don't want to name all the test
+   * tables the same thing -- they would all just reuse the implementation
+   * of the Java class that was classloaded first. So we use this counter
+   * to uniquify table names.
+   */
+  private static int tableNum = 0;
+
+  /** Prefix for the name of the table we'll populate with items for each test. */
+  static final String TABLE_NAME = "IMPORT_TABLE_";
+
+  protected String getTableName() {
+    return TABLE_NAME + Integer.toString(tableNum);
+  }
+
+  protected String getWarehouseDir() {
+    return LOCAL_WAREHOUSE_DIR;
+  }
+
+  private String [] colNames;
+  protected String [] getColNames() {
+    return colNames;
+  }
+
+  protected HsqldbTestServer getTestServer() {
+    return testServer;
+  }
+
+  protected ConnManager getManager() {
+    return manager;
+  }
+
+  // instance variables populated during setUp, used during tests
+  private HsqldbTestServer testServer;
+  private ConnManager manager;
+
+  private static boolean isLog4jConfigured = false;
+
+  protected void incrementTableNum() {
+    tableNum++;
+  }
+
+  @Before
+  public void setUp() {
+
+    incrementTableNum();
+
+    if (!isLog4jConfigured) {
+      BasicConfigurator.configure();
+      isLog4jConfigured = true;
+      LOG.info("Configured log4j with console appender.");
+    }
+
+    testServer = new HsqldbTestServer();
+    try {
+      testServer.resetServer();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    } catch (ClassNotFoundException cnfe) {
+      LOG.error("Could not find class for db driver: " + cnfe.toString());
+      fail("Could not find class for db driver: " + cnfe.toString());
+    }
+
+    manager = testServer.getManager();
+  }
+
+  @After
+  public void tearDown() {
+    try {
+      manager.close();
+    } catch (SQLException sqlE) {
+      LOG.error("Got SQLException: " + sqlE.toString());
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  static final String BASE_COL_NAME = "DATA_COL";
+
+  /**
+   * Create a table with a set of columns and add a row of values.
+   * @param colTypes the types of the columns to make
+   * @param vals the SQL text for each value to insert
+   */
+  protected void createTableWithColTypes(String [] colTypes, String [] vals) {
+    Connection conn = null;
+    try {
+      conn = getTestServer().getConnection();
+      PreparedStatement statement = conn.prepareStatement(
+          "DROP TABLE " + getTableName() + " IF EXISTS",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+
+      String columnDefStr = "";
+      String columnListStr = "";
+      String valueListStr = "";
+
+      String [] myColNames = new String[colTypes.length];
+
+      for (int i = 0; i < colTypes.length; i++) {
+        String colName = BASE_COL_NAME + Integer.toString(i);
+        columnDefStr += colName + " " + colTypes[i];
+        columnListStr += colName;
+        valueListStr += vals[i];
+        myColNames[i] = colName;
+        if (i < colTypes.length - 1) {
+          columnDefStr += ", ";
+          columnListStr += ", ";
+          valueListStr += ", ";
+        }
+      }
+
+      statement = conn.prepareStatement(
+          "CREATE TABLE " + getTableName() + "(" + columnDefStr + ")",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+
+      statement = conn.prepareStatement(
+          "INSERT INTO " + getTableName() + "(" + columnListStr + ")"
+          + " VALUES(" + valueListStr + ")",
+          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      statement.executeUpdate();
+      statement.close();
+      conn.commit();
+      this.colNames = myColNames;
+    } catch (SQLException sqlException) {
+      fail("Could not create table: " + sqlException.toString());
+    } finally {
+      if (null != conn) {
+        try {
+          conn.close();
+        } catch (SQLException sqlE) {
+          LOG.warn("Got SQLException during close: " + sqlE.toString());
+        }
+      }
+    }
+  }
+
+  /**
+   * Create a table with a single column and put a data element in it.
+   * @param colType the type of the column to create
+   * @param val the value to insert (reformatted as a string)
+   */
+  protected void createTableForColType(String colType, String val) {
+    String [] types = { colType };
+    String [] vals = { val };
+
+    createTableWithColTypes(types, vals);
+  }
+
+  /**
+   * Verify that the single-column, single-row result can be read back from the db.
+   */
+  protected void verifyReadback(int colNum, String expectedVal) {
+    try {
+      ResultSet results = getManager().readTable(getTableName(), getColNames());
+      assertNotNull("Null results from readTable()!", results);
+      assertTrue("Expected at least one row returned", results.next());
+      String resultVal = results.getString(colNum);
+      if (null != expectedVal) {
+        assertNotNull("Expected non-null result value", resultVal);
+      }
+
+      assertEquals("Error reading inserted value back from db", expectedVal, 
resultVal);
+      assertFalse("Expected at most one row returned", results.next());
+      results.close();
+    } catch (SQLException sqlE) {
+      fail("Got SQLException: " + sqlE.toString());
+    }
+  }
+
+  /**
+   * Create the argv to pass to Sqoop
+   * @param includeHadoopFlags if true, then include -D various.settings=values
+   * @param colNames the columns to import. If null, all columns are used.
+   * @return the argv as an array of strings.
+   */
+  private String [] getArgv(boolean includeHadoopFlags, String [] colNames) {
+    if (null == colNames) {
+      colNames = getColNames();
+    }
+
+    String orderByCol = colNames[0];
+    String columnsString = "";
+    for (String col : colNames) {
+      columnsString += col + ",";
+    }
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (includeHadoopFlags) {
+      args.add("-D");
+      args.add("mapred.job.tracker=local");
+      args.add("-D");
+      args.add("mapred.map.tasks=1");
+      args.add("-D");
+      args.add("fs.default.name=file:///");
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--columns");
+    args.add(columnsString);
+    args.add("--order-by");
+    args.add(orderByCol);
+    args.add("--warehouse-dir");
+    args.add(getWarehouseDir());
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+    args.add("--as-sequencefile");
+
+    return args.toArray(new String[0]);
+  }
+
+  protected Path getTablePath() {
+    Path warehousePath = new Path(getWarehouseDir());
+    Path tablePath = new Path(warehousePath, getTableName());
+    return tablePath;
+  }
+
+  protected Path getDataFilePath() {
+    return new Path(getTablePath(), "part-00000");
+  }
+
+  protected void removeTableDir() {
+    File tableDirFile = new File(getTablePath().toString());
+    if (tableDirFile.exists()) {
+      // Remove the directory where the table will be imported to,
+      // prior to running the MapReduce job.
+      if (!DirUtil.deleteDir(tableDirFile)) {
+        LOG.warn("Could not delete table directory: " + 
tableDirFile.getAbsolutePath());
+      }
+    }
+  }
+
+  /**
+   * Do a MapReduce-based import of the table and verify that the results
+   * were imported as expected. (tests readFields(ResultSet) and toString())
+   * @param expectedVal the value we injected into the table.
+   * @param importCols the columns to import. If null, all columns are used.
+   */
+  protected void verifyImport(String expectedVal, String [] importCols) {
+
+    // paths to where our output file will wind up.
+    Path dataFilePath = getDataFilePath();
+
+    removeTableDir();
+
+    // run the tool through the normal entry-point.
+    int ret;
+    try {
+      Sqoop importer = new Sqoop();
+      ret = ToolRunner.run(importer, getArgv(true, importCols));
+    } catch (Exception e) {
+      LOG.error("Got exception running Sqoop: " + e.toString());
+      e.printStackTrace();
+      ret = 1;
+    }
+
+    // expect a successful return.
+    assertEquals("Failure during job", 0, ret);
+
+    ImportOptions opts = new ImportOptions();
+    try {
+      opts.parse(getArgv(false, importCols));
+    } catch (InvalidOptionsException ioe) {
+      fail(ioe.toString());
+    }
+    CompilationManager compileMgr = new CompilationManager(opts);
+    String jarFileName = compileMgr.getJarFilename();
+    ClassLoader prevClassLoader = null;
+    try {
+      prevClassLoader = ClassLoaderStack.addJarFile(jarFileName, getTableName());
+
+      // now actually open the file and check it
+      File f = new File(dataFilePath.toString());
+      assertTrue("Error: " + dataFilePath.toString() + " does not exist", 
f.exists());
+
+      Object readValue = SeqFileReader.getFirstValue(dataFilePath.toString());
+      if (null == expectedVal) {
+        assertEquals("Error validating result from SeqFile", "null", 
readValue.toString());
+      } else {
+        assertEquals("Error validating result from SeqFile", expectedVal, 
readValue.toString());
+      }
+    } catch (IOException ioe) {
+      fail("IOException: " + ioe.toString());
+    } finally {
+      if (null != prevClassLoader) {
+        ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
+      }
+    }
+  }
+
+  /**
+   * Run a MapReduce-based import (using the argv provided to control execution).
+   */
+  protected void runImport(String [] argv) throws IOException {
+    removeTableDir();
+
+    // run the tool through the normal entry-point.
+    int ret;
+    try {
+      Sqoop importer = new Sqoop();
+      ret = ToolRunner.run(importer, argv);
+    } catch (Exception e) {
+      LOG.error("Got exception running Sqoop: " + e.toString());
+      e.printStackTrace();
+      ret = 1;
+    }
+
+    // expect a successful return.
+    assertEquals("Failure during job", 0, ret);
+  }
+
+}
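
A sketch of how a concrete test might build on ImportJobTestCase, using the
helpers above. The column type, inserted value, and expected value are
illustrative; the exact output depends on the generated table class's
toString():

    import org.apache.hadoop.sqoop.testutil.ImportJobTestCase;

    public class TestVarcharImportSketch extends ImportJobTestCase {

      public void testVarcharRoundTrip() {
        // Create a one-column table holding a single VARCHAR value.
        createTableForColType("VARCHAR(32)", "'meep'");

        // Check the value can be read straight back from the database.
        verifyReadback(1, "meep");

        // Run the MapReduce import (all columns) and validate the
        // SequenceFile output against the generated class's toString().
        verifyImport("meep", null);
      }
    }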

Added: 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/SeqFileReader.java
URL: 
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/SeqFileReader.java?rev=778646&view=auto
==============================================================================
--- 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/SeqFileReader.java
 (added)
+++ 
hadoop/core/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/SeqFileReader.java
 Tue May 26 10:29:38 2009
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.testutil;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.Reader;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * Utility class to help with test cases. Just reads the first (k, v) pair
+ * from a SequenceFile and returns the value part.
+ */
+public final class SeqFileReader {
+
+  public static final Log LOG = LogFactory.getLog(SeqFileReader.class.getName());
+
+  public static Reader getSeqFileReader(String filename) throws IOException {
+    // read from local filesystem
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    FileSystem fs = FileSystem.get(conf);
+    LOG.info("Opening SequenceFile " + filename);
+    return new SequenceFile.Reader(fs, new Path(filename), conf);
+  }
+
+  public static Object getFirstValue(String filename) throws IOException {
+    Reader r = null;
+    try {
+      // Configuration instance used to instantiate key/value objects below.
+      Configuration conf = new Configuration();
+      conf.set("fs.default.name", "file:///");
+      r = getSeqFileReader(filename); // open via the shared helper above
+      Object key = ReflectionUtils.newInstance(r.getKeyClass(), conf);
+      Object val = ReflectionUtils.newInstance(r.getValueClass(), conf);
+      LOG.info("Reading value of type " + r.getValueClassName()
+          + " from SequenceFile " + filename);
+      r.next(key);
+      r.getCurrentValue(val);
+      LOG.info("Value as string: " + val.toString());
+      return val;
+    } finally {
+      if (null != r) {
+        try {
+          r.close();
+        } catch (IOException ioe) {
+          LOG.warn("IOException during close: " + ioe.toString());
+        }
+      }
+    }
+  }
+}
+
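
A usage sketch for SeqFileReader, reading back the first imported record the
way ImportJobTestCase.verifyImport() does (the part-file path is a
placeholder):

    import org.apache.hadoop.sqoop.testutil.SeqFileReader;

    public class SeqFileReadSketch {
      public static void main(String[] args) throws Exception {
        // Path to a SequenceFile produced by an import; placeholder value.
        String path = "/tmp/sqoop/warehouse/IMPORT_TABLE_1/part-00000";

        // Returns the value half of the first (key, value) pair in the file.
        Object firstValue = SeqFileReader.getFirstValue(path);
        System.out.println("First value: " + firstValue);
      }
    }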


Reply via email to