Markus Kemper created SQOOP-3175:
------------------------------------

             Summary: Sqoop1 (import + --as-parquetfile) writes data to wrong Hive table if same table name exists in Hive default database
                 Key: SQOOP-3175
                 URL: https://issues.apache.org/jira/browse/SQOOP-3175
             Project: Sqoop
          Issue Type: Bug
            Reporter: Markus Kemper


Sqoop1 (import + --as-parquetfile) writes data to the wrong Hive table when a table with the same name exists in the Hive default database: text-format imports honor --hive-database, but parquet imports silently append to default.<table> once that table exists. The test case below demonstrates the issue.
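
A quick way to check for the mismatch is to compare where Hive registers each copy of t1 with where the data files actually land (a sketch only; it assumes the db1 database created in the test case below):

{noformat}
# Where Hive registers each copy of t1 (see the Location: field) ...
beeline -u jdbc:hive2:// -e "describe formatted default.t1; describe formatted db1.t1;"
# ... versus where the import actually wrote the parquet files
hdfs dfs -ls -R /user/hive/warehouse/t1 /data/dbs/db1/t1
{noformat}

When the bug has hit, the second describe fails with "Table not found" while the extra parquet file shows up under /user/hive/warehouse/t1.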

*Test Case*
{noformat}
########################################################################
# Issue: Data files written to the wrong table with Parquet
########################################################################

#################
# STEP 01 - Create Table and Data
#################

export MYCONN=jdbc:mysql://mysql.cloudera.com:3306/sqoop
export MYUSER=sqoop
export MYPSWD=cloudera

sqoop eval --connect $MYCONN --username $MYUSER --password $MYPSWD --query "drop table t1"
sqoop eval --connect $MYCONN --username $MYUSER --password $MYPSWD --query "create table t1 (c1 int, c2 date, c3 varchar(10))"
sqoop eval --connect $MYCONN --username $MYUSER --password $MYPSWD --query "insert into t1 values (1, current_date, 'new row 1')"
sqoop eval --connect $MYCONN --username $MYUSER --password $MYPSWD --query "select * from t1"

-----------------------------------------
| c1          | c2         | c3         | 
-----------------------------------------
| 1           | 2017-04-22 | new row 1  | 
-----------------------------------------

#################
# STEP 02 - Create HDFS Structures
#################

beeline -u jdbc:hive2:// -e "drop database db1 cascade; drop database db2 cascade; drop database db3 cascade;"
sudo -u hdfs hdfs dfs -rm -r /data/tmp
sudo -u hdfs hdfs dfs -rm -r /data/dbs
sudo -u hdfs hdfs dfs -mkdir -p /data/tmp
sudo -u hdfs hdfs dfs -chmod 777 /data/tmp
sudo -u hdfs hdfs dfs -mkdir -p /data/dbs
sudo -u hdfs hdfs dfs -chmod 777 /data/dbs

#################
# STEP 03 - Create Hive Databases
#################

beeline -u jdbc:hive2:// -e "create database db1 location '/data/dbs/db1'; create database db2 location '/data/dbs/db2';"
beeline -u jdbc:hive2:// -e "show databases; describe database default; describe database db1; describe database db2;"
beeline -u jdbc:hive2:// -e "use default; show tables; use db1; show tables; use db2; show tables;"
hdfs dfs -ls -R /user/hive/warehouse /data

+----------+------------------------+------------------------------------------+-------------+-------------+-------------+--+
| db_name  |        comment         |                 location                 | owner_name  | owner_type  | parameters  |
+----------+------------------------+------------------------------------------+-------------+-------------+-------------+--+
| default  | Default Hive database  | hdfs://nameservice1/user/hive/warehouse  | public      | ROLE        |             |
+----------+------------------------+------------------------------------------+-------------+-------------+-------------+--+
+----------+----------+-----------------------------------+-------------+-------------+-------------+--+
| db_name  | comment  |             location              | owner_name  | owner_type  | parameters  |
+----------+----------+-----------------------------------+-------------+-------------+-------------+--+
| db1      |          | hdfs://nameservice1/data/dbs/db1  | root        | USER        |             |
+----------+----------+-----------------------------------+-------------+-------------+-------------+--+
+----------+----------+-----------------------------------+-------------+-------------+-------------+--+
| db_name  | comment  |             location              | owner_name  | owner_type  | parameters  |
+----------+----------+-----------------------------------+-------------+-------------+-------------+--+
| db2      |          | hdfs://nameservice1/data/dbs/db2  | root        | USER        |             |
+----------+----------+-----------------------------------+-------------+-------------+-------------+--+
~~~~~
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
~~~~~
drwxrwxrwx   - root supergroup          0 2017-04-22 16:22 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 16:22 /data/dbs/db2

#################
# STEP 04 - Import RDBMS Table, Create Hive Table in Database (default) and Load Data (using --as-textfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database default --hive-table t1 --as-textfile --num-mappers 1
hdfs dfs -ls -R /user/hive/warehouse
beeline -u jdbc:hive2:// -e "select * from default.t1;"

Output:
17/04/22 16:37:05 INFO mapreduce.ImportJobBase: Transferred 23 bytes in 22.6016 seconds (1.0176 bytes/sec)
17/04/22 16:37:05 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
drwxrwxrwt   - root hive          0 2017-04-22 16:37 /user/hive/warehouse/t1
-rw-r--r--   3 root supergroup         23 2017-04-22 16:37 /user/hive/warehouse/t1/part-m-00000
~~~~~
+--------+-------------+------------+--+
| t1.c1  |    t1.c2    |   t1.c3    |
+--------+-------------+------------+--+
| 1      | 2017-04-22  | new row 1  |
+--------+-------------+------------+--+

#################
# STEP 05 - Import RDBMS Table, Create Hive Table in Database (db1) and Load Data (using --as-textfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database db1 --hive-table t1 --as-textfile --num-mappers 1
hdfs dfs -ls -R /data/dbs/db1/t1
beeline -u jdbc:hive2:// -e "select * from default.t1; select * from db1.t1;"

Output:
17/04/22 16:39:49 INFO mapreduce.ImportJobBase: Transferred 23 bytes in 21.4485 seconds (1.0723 bytes/sec)
17/04/22 16:39:49 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
-rwxrwxrwx   3 root supergroup         23 2017-04-22 16:39 /data/dbs/db1/t1/part-m-00000
~~~~~
+--------+-------------+------------+--+
| t1.c1  |    t1.c2    |   t1.c3    |
+--------+-------------+------------+--+
| 1      | 2017-04-22  | new row 1  |
+--------+-------------+------------+--+
+--------+-------------+------------+--+
| t1.c1  |    t1.c2    |   t1.c3    |
+--------+-------------+------------+--+
| 1      | 2017-04-22  | new row 1  |
+--------+-------------+------------+--+

#################
# STEP 06 - Import RDBMS Table, Create Hive Table in Database (db2) and Load Data (using --as-textfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database db2 --hive-table t1 --as-textfile --num-mappers 1
hdfs dfs -ls -R /data/dbs/db2/t1
beeline -u jdbc:hive2:// -e "select * from default.t1; select * from db1.t1; select * from db2.t1;"

Output:
17/04/22 16:42:18 INFO mapreduce.ImportJobBase: Transferred 23 bytes in 20.4574 seconds (1.1243 bytes/sec)
17/04/22 16:42:18 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
-rwxrwxrwx   3 root supergroup         23 2017-04-22 16:42 /data/dbs/db2/t1/part-m-00000
~~~~~
+--------+-------------+------------+--+
| t1.c1  |    t1.c2    |   t1.c3    |
+--------+-------------+------------+--+
| 1      | 2017-04-22  | new row 1  |
+--------+-------------+------------+--+
+--------+-------------+------------+--+
| t1.c1  |    t1.c2    |   t1.c3    |
+--------+-------------+------------+--+
| 1      | 2017-04-22  | new row 1  |
+--------+-------------+------------+--+
+--------+-------------+------------+--+
| t1.c1  |    t1.c2    |   t1.c3    |
+--------+-------------+------------+--+
| 1      | 2017-04-22  | new row 1  |
+--------+-------------+------------+--+
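
# NOTE: Baseline with --as-textfile: each import lands in its own database as
# expected (default.t1 under /user/hive/warehouse/t1, db1.t1 under
# /data/dbs/db1/t1, db2.t1 under /data/dbs/db2/t1), and each table returns
# exactly one row.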

#################
# STEP 07 - Reset
#################

hdfs dfs -ls -R /user/hive/warehouse /data
beeline -u jdbc:hive2:// -e "use default; drop table t1; show tables; use db1; drop table t1; show tables; use db2; drop table t1; show tables;"
hdfs dfs -ls -R /user/hive/warehouse /data

Output:
drwxrwxrwt   - root hive          0 2017-04-22 16:37 /user/hive/warehouse/t1
-rw-r--r--   3 root supergroup         23 2017-04-22 16:37 /user/hive/warehouse/t1/part-m-00000
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 16:39 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 16:39 /data/dbs/db1/t1
-rwxrwxrwx   3 root supergroup         23 2017-04-22 16:39 /data/dbs/db1/t1/part-m-00000
drwxrwxrwx   - root supergroup          0 2017-04-22 16:42 /data/dbs/db2
drwxrwxrwx   - root supergroup          0 2017-04-22 16:42 /data/dbs/db2/t1
-rwxrwxrwx   3 root supergroup         23 2017-04-22 16:42 /data/dbs/db2/t1/part-m-00000
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp
~~~~~
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
~~~~~
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - root supergroup          0 2017-04-22 16:22 /data/dbs/db3
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp

#################
# STEP 08 - Import RDBMS Table, Create Hive Table in Database (default) and Load Data (using --as-parquetfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database default --hive-table t1 --as-parquetfile --num-mappers 1
hdfs dfs -ls -R /user/hive/warehouse /data
beeline -u jdbc:hive2:// -e "select * from default.t1;"

Output:
17/04/22 16:52:57 INFO mapreduce.ImportJobBase: Transferred 2.0303 KB in 38.6367 seconds (53.8089 bytes/sec)
17/04/22 16:52:57 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
drwxrwxrwt   - root hive          0 2017-04-22 16:52 /user/hive/warehouse/t1
drwxr-xr-x   - root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata
drwxr-xr-x   - root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata/schemas
-rw-r--r--   3 root hive              492 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.signals
-rw-r--r--   3 root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 16:52 /user/hive/warehouse/t1/491185ac-b156-4bc8-aca1-0ccb3761bab7.parquet
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp
~~~~~
+--------+----------------+------------+--+
| t1.c1  |     t1.c2      |   t1.c3    |
+--------+----------------+------------+--+
| 1      | 1492844400000  | new row 1  |
+--------+----------------+------------+--+
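
# NOTE: Two observations from the parquet import above: (1) Kite-style
# .metadata and .signals directories appear alongside the data file, and
# (2) the DATE column c2 comes back as epoch milliseconds: 1492844400000 ms
# = 2017-04-22 00:00:00 at UTC-7 (assuming the cluster's local timezone),
# i.e. the same value the text import rendered as 2017-04-22.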

#################
# STEP 09 - Import RDBMS Table, Create Hive Table in Database (db1) and Load Data (using --as-parquetfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database db1 --hive-table t1 --as-parquetfile --num-mappers 1
hdfs dfs -ls -R /user/hive/warehouse /data
beeline -u jdbc:hive2:// -e "select * from default.t1; select * from db1.t1;"

Output:
17/04/22 16:55:37 INFO mapreduce.ImportJobBase: Transferred 2.0264 KB in 40.5875 seconds (51.1241 bytes/sec)
17/04/22 16:55:37 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
drwxrwxrwt   - root hive          0 2017-04-22 16:55 /user/hive/warehouse/t1
drwxr-xr-x   - root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata
drwxr-xr-x   - root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata/schemas
-rw-r--r--   3 root hive              492 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root hive                0 2017-04-22 16:55 /user/hive/warehouse/t1/.signals
-rw-r--r--   3 root hive                0 2017-04-22 16:55 /user/hive/warehouse/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 16:52 /user/hive/warehouse/t1/491185ac-b156-4bc8-aca1-0ccb3761bab7.parquet
-rw-r--r--   3 root supergroup        806 2017-04-22 16:55 /user/hive/warehouse/t1/626bc984-5331-47b9-90e2-f212556839eb.parquet
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp
~~~~~
+--------+----------------+------------+--+
| t1.c1  |     t1.c2      |   t1.c3    |
+--------+----------------+------------+--+
| 1      | 1492844400000  | new row 1  |
| 1      | 1492844400000  | new row 1  | <=== should be in database db1
+--------+----------------+------------+--+
~~~~~
17/04/22 16:58:08 [main]: ERROR parse.SemanticAnalyzer: org.apache.hadoop.hive.ql.parse.SemanticException: Line 1:14 Table not found 't1'
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.getMetaData(SemanticAnalyzer.java:1517)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.getMetaData(SemanticAnalyzer.java:1467)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genResolvedParseTree(SemanticAnalyzer.java:9987)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:10038)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:9923)
        at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:223)
        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:491)
        at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1277)
        at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1264)
        at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:186)
        at org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:267)
        at org.apache.hive.service.cli.operation.Operation.run(Operation.java:337)
        at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:439)
        at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:416)
        at org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:282)
        at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:501)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hive.jdbc.HiveConnection$SynchronizedHandler.invoke(HiveConnection.java:1299)
        at com.sun.proxy.$Proxy25.ExecuteStatement(Unknown Source)
        at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:246)
        at org.apache.hive.beeline.Commands.executeInternal(Commands.java:990)
        at org.apache.hive.beeline.Commands.execute(Commands.java:1192)
        at org.apache.hive.beeline.Commands.sql(Commands.java:1106)
        at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1169)
        at org.apache.hive.beeline.BeeLine.initArgs(BeeLine.java:833)
        at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:891)
        at org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:511)
        at org.apache.hive.beeline.BeeLine.main(BeeLine.java:494)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
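
# NOTE: The STEP 09 import reported success, yet the listing above shows the
# second parquet file was appended to /user/hive/warehouse/t1 (default.t1)
# rather than /data/dbs/db1/t1, and db1.t1 was never created, hence the
# "Table not found 't1'" error from "select * from db1.t1". A quick way to
# confirm (a sketch, using the paths from the listing above):
hdfs dfs -ls -R /data/dbs/db1
beeline -u jdbc:hive2:// -e "show tables in db1; describe formatted default.t1;"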

#################
# STEP 10 - Reset
#################

hdfs dfs -ls -R /user/hive/warehouse /data
beeline -u jdbc:hive2:// -e "use default; drop table t1; show tables; use db1; drop table t1; show tables; use db2; drop table t1; show tables;"
hdfs dfs -ls -R /user/hive/warehouse /data

Output:
drwxrwxrwt   - root hive          0 2017-04-22 16:55 /user/hive/warehouse/t1
drwxr-xr-x   - root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata
drwxr-xr-x   - root hive                0 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata/schemas
-rw-r--r--   3 root hive              492 2017-04-22 16:52 /user/hive/warehouse/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root hive                0 2017-04-22 16:55 /user/hive/warehouse/t1/.signals
-rw-r--r--   3 root hive                0 2017-04-22 16:55 /user/hive/warehouse/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 16:52 /user/hive/warehouse/t1/491185ac-b156-4bc8-aca1-0ccb3761bab7.parquet
-rw-r--r--   3 root supergroup        806 2017-04-22 16:55 /user/hive/warehouse/t1/626bc984-5331-47b9-90e2-f212556839eb.parquet
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp
~~~~~
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
+-----------+--+
| tab_name  |
+-----------+--+
+-----------+--+
~~~~~
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp

#################
# STEP 11 - Import RDBMS Table, Create Hive Table in Database (db1) and Load Data (using --as-parquetfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database db1 --hive-table t1 --as-parquetfile --num-mappers 1
hdfs dfs -ls -R /user/hive/warehouse /data
beeline -u jdbc:hive2:// -e "select * from db1.t1;"

Output:
17/04/22 17:07:54 INFO mapreduce.ImportJobBase: Transferred 2.0264 KB in 41.2402 seconds (50.3149 bytes/sec)
17/04/22 17:07:54 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.metadata
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.metadata/schemas
-rw-r--r--   3 root supergroup        492 2017-04-22 17:07 /data/dbs/db1/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.signals
-rw-r--r--   3 root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 17:07 /data/dbs/db1/t1/726eb5ed-4c84-4b6c-a9aa-ebe410c519cb.parquet
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp
~~~~~
+--------+----------------+------------+--+
| t1.c1  |     t1.c2      |   t1.c3    |
+--------+----------------+------------+--+
| 1      | 1492844400000  | new row 1  |
+--------+----------------+------------+--+
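
# NOTE: After the reset, with no t1 in the default database, the identical
# parquet import now creates db1.t1 under /data/dbs/db1/t1 as expected. The
# misdirection in STEP 09 therefore only occurs when a same-named table
# already exists in default.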

#################
# STEP 12 - Import RDBMS Table, Create Hive Table in Database (default) and Load Data (using --as-parquetfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database default --hive-table t1 --as-parquetfile --num-mappers 1
hdfs dfs -ls -R /user/hive/warehouse /data
beeline -u jdbc:hive2:// -e "select * from db1.t1; select * from default.t1;"

Output:
17/04/22 17:14:35 INFO mapreduce.ImportJobBase: Transferred 2.0303 KB in 50.7407 seconds (40.973 bytes/sec)
17/04/22 17:14:35 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
drwxrwxrwt   - root hive          0 2017-04-22 17:14 /user/hive/warehouse/t1
drwxr-xr-x   - root hive                0 2017-04-22 17:13 /user/hive/warehouse/t1/.metadata
drwxr-xr-x   - root hive                0 2017-04-22 17:13 /user/hive/warehouse/t1/.metadata/schemas
-rw-r--r--   3 root hive              492 2017-04-22 17:13 /user/hive/warehouse/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root hive                0 2017-04-22 17:14 /user/hive/warehouse/t1/.signals
-rw-r--r--   3 root hive                0 2017-04-22 17:14 /user/hive/warehouse/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 17:14 /user/hive/warehouse/t1/a64f401a-74a1-4e87-a103-00a73415dcd0.parquet
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.metadata
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.metadata/schemas
-rw-r--r--   3 root supergroup        492 2017-04-22 17:07 /data/dbs/db1/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.signals
-rw-r--r--   3 root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 17:07 /data/dbs/db1/t1/726eb5ed-4c84-4b6c-a9aa-ebe410c519cb.parquet
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp
~~~~~
+--------+----------------+------------+--+
| t1.c1  |     t1.c2      |   t1.c3    |
+--------+----------------+------------+--+
| 1      | 1492844400000  | new row 1  |
+--------+----------------+------------+--+
1 row selected (1.972 seconds)
OK
+--------+----------------+------------+--+
| t1.c1  |     t1.c2      |   t1.c3    |
+--------+----------------+------------+--+
| 1      | 1492844400000  | new row 1  |
+--------+----------------+------------+--+

#################
# STEP 13 - Import RDBMS Table, Create Hive Table in Database (db2) and Load Data (using --as-parquetfile)
#################

sqoop import --connect $MYCONN --username $MYUSER --password $MYPSWD --table t1 --target-dir /data/tmp/t1 --hive-import --hive-database db2 --hive-table t1 --as-parquetfile --num-mappers 1
hdfs dfs -ls -R /user/hive/warehouse /data
beeline -u jdbc:hive2:// -e "select * from db2.t1; select * from db1.t1; select * from default.t1;"
beeline -u jdbc:hive2:// -e "select * from db1.t1; select * from default.t1;"

Output:
17/04/22 17:17:47 INFO mapreduce.ImportJobBase: Transferred 2.0264 KB in 40.6876 seconds (50.9983 bytes/sec)
17/04/22 17:17:47 INFO mapreduce.ImportJobBase: Retrieved 1 records.
~~~~~
drwxrwxrwt   - root hive          0 2017-04-22 17:17 /user/hive/warehouse/t1
drwxr-xr-x   - root hive                0 2017-04-22 17:13 /user/hive/warehouse/t1/.metadata
drwxr-xr-x   - root hive                0 2017-04-22 17:13 /user/hive/warehouse/t1/.metadata/schemas
-rw-r--r--   3 root hive              492 2017-04-22 17:13 /user/hive/warehouse/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root hive                0 2017-04-22 17:17 /user/hive/warehouse/t1/.signals
-rw-r--r--   3 root hive                0 2017-04-22 17:17 /user/hive/warehouse/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 17:14 /user/hive/warehouse/t1/a64f401a-74a1-4e87-a103-00a73415dcd0.parquet
-rw-r--r--   3 root supergroup        806 2017-04-22 17:17 /user/hive/warehouse/t1/ebf04829-ac52-4175-9de7-445691b29788.parquet
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:22 /data/dbs
drwxrwxrwx   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1
drwxrwxrwx   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.metadata
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.metadata/schemas
-rw-r--r--   3 root supergroup        492 2017-04-22 17:07 /data/dbs/db1/t1/.metadata/schemas/1.avsc
drwxr-xr-x   - root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.signals
-rw-r--r--   3 root supergroup          0 2017-04-22 17:07 /data/dbs/db1/t1/.signals/unbounded
-rw-r--r--   3 root supergroup        806 2017-04-22 17:07 /data/dbs/db1/t1/726eb5ed-4c84-4b6c-a9aa-ebe410c519cb.parquet
drwxrwxrwx   - root supergroup          0 2017-04-22 16:48 /data/dbs/db2
drwxrwxrwx   - hdfs supergroup          0 2017-04-22 16:42 /data/tmp
~~~~~
17/04/22 17:20:02 [main]: ERROR parse.SemanticAnalyzer: org.apache.hadoop.hive.ql.parse.SemanticException: Line 1:14 Table not found 't1'
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.getMetaData(SemanticAnalyzer.java:1517)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.getMetaData(SemanticAnalyzer.java:1467)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genResolvedParseTree(SemanticAnalyzer.java:9987)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:10038)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:9923)
        at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:223)
        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:491)
        at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1277)
        at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1264)
        at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:186)
        at org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:267)
        at org.apache.hive.service.cli.operation.Operation.run(Operation.java:337)
        at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:439)
        at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:416)
        at org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:282)
        at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:501)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hive.jdbc.HiveConnection$SynchronizedHandler.invoke(HiveConnection.java:1299)
        at com.sun.proxy.$Proxy25.ExecuteStatement(Unknown Source)
        at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:246)
        at org.apache.hive.beeline.Commands.executeInternal(Commands.java:990)
        at org.apache.hive.beeline.Commands.execute(Commands.java:1192)
        at org.apache.hive.beeline.Commands.sql(Commands.java:1106)
        at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1169)
        at org.apache.hive.beeline.BeeLine.initArgs(BeeLine.java:833)
        at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:891)
        at org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:511)
        at org.apache.hive.beeline.BeeLine.main(BeeLine.java:494)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
~~~~~
+--------+----------------+------------+--+
| t1.c1  |     t1.c2      |   t1.c3    |
+--------+----------------+------------+--+
| 1      | 1492844400000  | new row 1  |
+--------+----------------+------------+--+
+--------+----------------+------------+--+
| t1.c1  |     t1.c2      |   t1.c3    |
+--------+----------------+------------+--+
| 1      | 1492844400000  | new row 1  |
| 1      | 1492844400000  | new row 1  |  <=== should be in database db2
+--------+----------------+------------+--+
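
# NOTE: Same failure pattern as STEP 09: the STEP 13 parquet file intended
# for db2.t1 was appended to /user/hive/warehouse/t1 (default.t1), db2.t1
# was never created (hence "Table not found 't1'"), and default.t1 now
# returns two rows while db1.t1 still has one.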
{noformat}



--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
