[ https://issues.apache.org/jira/browse/CARBONDATA-1041?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Vinod Rohilla updated CARBONDATA-1041:
--------------------------------------
    Description: 
NULL is displayed for the timestamp data type in Spark 1.6.

Steps to reproduce:

1: CarbonData:

a) Create table:

CREATE TABLE uniqdata (
  CUST_ID int, CUST_NAME String, ACTIVE_EMUI_VERSION string,
  DOB timestamp, DOJ timestamp,
  BIGINT_COLUMN1 bigint, BIGINT_COLUMN2 bigint,
  DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),
  Double_COLUMN1 double, Double_COLUMN2 double,
  INTEGER_COLUMN1 int
) STORED BY 'org.apache.carbondata.format';

b) Load table:
LOAD DATA INPATH 'hdfs://localhost:54310/2000_UniqData.csv' INTO TABLE uniqdata
OPTIONS('DELIMITER'=',', 'QUOTECHAR'='"',
'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
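
Note: if the CSV's timestamp strings do not match the format CarbonData expects (the carbon.timestamp.format property, which defaults to yyyy-MM-dd HH:mm:ss), the values are treated as bad records and stored as NULL. As a sketch of a load-time workaround, the format can be declared per column; the DATEFORMAT option below follows the CarbonData 1.x DML guide, and the assumed CSV format should be verified against the actual file:

-- Sketch, assuming the CSV stores DOB/DOJ as 'yyyy-MM-dd HH:mm:ss' and this
-- CarbonData build supports the column-wise DATEFORMAT load option.
LOAD DATA INPATH 'hdfs://localhost:54310/2000_UniqData.csv' INTO TABLE uniqdata
OPTIONS('DELIMITER'=',', 'QUOTECHAR'='"',
'DATEFORMAT'='DOB:yyyy-MM-dd HH:mm:ss, DOJ:yyyy-MM-dd HH:mm:ss',
'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');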

c) Select query: select Cust_name,doj from uniqdata limit 50;
+------------------+-------+--+
|    Cust_name     |  doj  |
+------------------+-------+--+
| CUST_NAME_00000  | NULL  |
| CUST_NAME_00000  | NULL  |
| CUST_NAME_00001  | NULL  |
| CUST_NAME_00002  | NULL  |
| CUST_NAME_00003  | NULL  |
| CUST_NAME_00004  | NULL  |
| CUST_NAME_00005  | NULL  |
| CUST_NAME_00006  | NULL  |
| CUST_NAME_00007  | NULL  |
| CUST_NAME_00008  | NULL  |
| CUST_NAME_00009  | NULL  |
| CUST_NAME_00010  | NULL  |
| CUST_NAME_00011  | NULL  |
| CUST_NAME_00012  | NULL  |
| CUST_NAME_00013  | NULL  |
| CUST_NAME_00014  | NULL  |
| CUST_NAME_00015  | NULL  |
| CUST_NAME_00016  | NULL  |
| CUST_NAME_00017  | NULL  |
| CUST_NAME_00018  | NULL  |
| CUST_NAME_00019  | NULL  |
| CUST_NAME_00020  | NULL  |
| CUST_NAME_00021  | NULL  |
| CUST_NAME_00022  | NULL  |
| CUST_NAME_00023  | NULL  |
| CUST_NAME_00024  | NULL  |
| CUST_NAME_00025  | NULL  |
| CUST_NAME_00026  | NULL  |
| CUST_NAME_00027  | NULL  |
| CUST_NAME_00028  | NULL  |
| CUST_NAME_00029  | NULL  |
| CUST_NAME_00030  | NULL  |
| CUST_NAME_00031  | NULL  |
| CUST_NAME_00032  | NULL  |
| CUST_NAME_00033  | NULL  |
| CUST_NAME_00034  | NULL  |
| CUST_NAME_00035  | NULL  |
| CUST_NAME_00036  | NULL  |
+------------------+-------+--+
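
To confirm the NULLs are actually stored in the segment (a load-time parse failure) rather than introduced while rendering results, a quick diagnostic (not part of the original report) is:

-- Diagnostic sketch: if this returns 0, every DOJ value was nulled at load time.
select count(*) from uniqdata where doj is not null;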

2: Hive table:

a) Create table:

CREATE TABLE uniqdata_H (
  CUST_ID int, CUST_NAME String, ACTIVE_EMUI_VERSION string,
  DOB timestamp, DOJ timestamp,
  BIGINT_COLUMN1 bigint, BIGINT_COLUMN2 bigint,
  DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),
  Double_COLUMN1 double, Double_COLUMN2 double,
  INTEGER_COLUMN1 int
) ROW FORMAT DELIMITED FIELDS TERMINATED BY ",";

b) Load data:

LOAD DATA LOCAL INPATH '/home/vinod/Desktop/AllCSV/2000_UniqData.csv' OVERWRITE
INTO TABLE uniqdata_h;

c) Select query: select Cust_name,doj from uniqdata_h limit 50;
+------------------+------------------------+--+
|    Cust_name     |          doj           |
+------------------+------------------------+--+
| CUST_NAME_00000  | 1970-01-01 11:00:03.0  |
| CUST_NAME_00001  | 1970-01-02 02:00:03.0  |
| CUST_NAME_00002  | 1970-01-03 02:00:03.0  |
| CUST_NAME_00003  | 1970-01-04 02:00:03.0  |
| CUST_NAME_00004  | 1970-01-05 02:00:03.0  |
| CUST_NAME_00005  | 1970-01-06 02:00:03.0  |
| CUST_NAME_00006  | 1970-01-07 02:00:03.0  |
| CUST_NAME_00007  | 1970-01-08 02:00:03.0  |
| CUST_NAME_00008  | 1970-01-09 02:00:03.0  |
| CUST_NAME_00009  | 1970-01-10 02:00:03.0  |
| CUST_NAME_00010  | 1970-01-11 02:00:03.0  |
| CUST_NAME_00011  | 1970-01-12 02:00:03.0  |
| CUST_NAME_00012  | 1970-01-13 02:00:03.0  |
| CUST_NAME_00013  | 1970-01-14 02:00:03.0  |
| CUST_NAME_00014  | 1970-01-15 02:00:03.0  |
| CUST_NAME_00015  | 1970-01-16 02:00:03.0  |
| CUST_NAME_00016  | 1970-01-17 02:00:03.0  |
| CUST_NAME_00017  | 1970-01-18 02:00:03.0  |
| CUST_NAME_00018  | 1970-01-19 02:00:03.0  |
| CUST_NAME_00019  | 1970-01-20 02:00:03.0  |
| CUST_NAME_00020  | 1970-01-21 02:00:03.0  |
| CUST_NAME_00021  | 1970-01-22 02:00:03.0  |
| CUST_NAME_00022  | 1970-01-23 02:00:03.0  |
| CUST_NAME_00023  | 1970-01-24 02:00:03.0  |
| CUST_NAME_00024  | 1970-01-25 02:00:03.0  |
| CUST_NAME_00025  | 1970-01-26 02:00:03.0  |
| CUST_NAME_00026  | 1970-01-27 02:00:03.0  |
| CUST_NAME_00027  | 1970-01-28 02:00:03.0  |
| CUST_NAME_00028  | 1970-01-29 02:00:03.0  |
| CUST_NAME_00029  | 1970-01-30 02:00:03.0  |
| CUST_NAME_00030  | 1970-01-31 02:00:03.0  |
| CUST_NAME_00031  | 1970-02-01 02:00:03.0  |
| CUST_NAME_00032  | 1970-02-02 02:00:03.0  |
| CUST_NAME_00033  | 1970-02-03 02:00:03.0  |
| CUST_NAME_00034  | 1970-02-04 02:00:03.0  |
| CUST_NAME_00035  | 1970-02-05 02:00:03.0  |
| CUST_NAME_00036  | 1970-02-06 02:00:03.0  |
+------------------+------------------------+--+
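
Since Hive parses the same CSV with its default yyyy-MM-dd HH:mm:ss timestamp format, the data itself appears well-formed, which points at CarbonData's load-time timestamp handling. Rerunning the Carbon load with bad-records handling enabled would show whether the DOJ values are rejected during parsing; the option names below are taken from CarbonData's bad-records documentation and should be checked against the installed build:

-- Sketch: fail the load on the first unparseable value instead of silently
-- storing NULL. ('FORCE' would instead convert bad values to NULL and continue.)
LOAD DATA INPATH 'hdfs://localhost:54310/2000_UniqData.csv' INTO TABLE uniqdata
OPTIONS('DELIMITER'=',', 'QUOTECHAR'='"',
'BAD_RECORDS_LOGGER_ENABLE'='true', 'BAD_RECORDS_ACTION'='FAIL',
'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');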




> Null displays in timestamp data type in Spark 1.6
> -------------------------------------------------
>
>                 Key: CARBONDATA-1041
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-1041
>             Project: CarbonData
>          Issue Type: Bug
>          Components: data-query
>         Environment: Spark 1.6
>            Reporter: Vinod Rohilla
>            Priority: Minor
>         Attachments: 2000_UniqData.csv
>



--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
