[ https://issues.apache.org/jira/browse/HIVE-17098?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16412803#comment-16412803 ]

Hive QA commented on HIVE-17098:
--------------------------------



Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12915896/HIVE-17098.2.patch

{color:red}ERROR:{color} -1 due to no test(s) being added or modified.

{color:red}ERROR:{color} -1 due to 83 failed/errored test(s), 13820 tests executed
*Failed tests:*
{noformat}
TestMinimrCliDriver - did not produce a TEST-*.xml file (likely timed out) (batchId=92)
        [infer_bucket_sort_num_buckets.q,infer_bucket_sort_reducers_power_two.q,parallel_orderby.q,bucket_num_reducers_acid.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_merge.q,root_dir_external_table.q,infer_bucket_sort_dyn_part.q,udf_using.q,bucket_num_reducers_acid2.q]
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[acid_table_stats] (batchId=54)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[druid_timeseries] (batchId=60)
org.apache.hadoop.hive.cli.TestMiniLlapCliDriver.testCliDriver[llap_smb] (batchId=152)
org.apache.hadoop.hive.cli.TestMiniLlapCliDriver.testCliDriver[mm_all] (batchId=151)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[bucket_map_join_tez1] (batchId=173)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[bucket_map_join_tez_empty] (batchId=158)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[default_constraint] (batchId=162)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[groupby_groupingset_bug] (batchId=172)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[insert_values_orig_table_use_metadata] (batchId=168)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[mergejoin] (batchId=167)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[sysdb] (batchId=161)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[tez_dynpart_hashjoin_1] (batchId=172)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[tez_smb_main] (batchId=159)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[update_access_time_non_current_db] (batchId=170)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[vectorization_div0] (batchId=169)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[vectorized_dynamic_semijoin_reduction] (batchId=153)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.org.apache.hadoop.hive.cli.TestNegativeCliDriver (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[mm_convert] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[mm_truncate_cols] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[smb_bucketmapjoin] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[smb_mapjoin_14] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[sortmerge_mapjoin_mismatch_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_aggregator_error_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_aggregator_error_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_publisher_error_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_corr_in_agg] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_notin_implicit_gby] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_bucketed_column] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_column_seqfile] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_format_number_wrong3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_greatest_error_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_if_not_bool] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_if_wrong_args_len] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_in] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_instr_wrong_args_len] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_invalid] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_likeall_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_likeany_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_keys_arg_num] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_keys_arg_type] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_values_arg_type] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_max] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_next_day_error_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_next_day_error_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_nonexistent_resource] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_printf_wrong4] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_reflect_neg] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_by_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_test_error] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_explode_not_supported4] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_not_supported1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_not_supported3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[union22] (batchId=94)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[union2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unionSortBy] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[uniquejoin3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[uniquejoin] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unset_table_property] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[updateBasicStats] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_bucket_col] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_non_acid_table] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_notnull_constraint] (batchId=94)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_partition_col] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[view_update] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_invalid_udaf] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_leadlag_in_udaf] (batchId=95)
org.apache.hadoop.hive.cli.TestSparkCliDriver.testCliDriver[subquery_scalar] (batchId=124)
org.apache.hadoop.hive.druid.serde.TestDruidSerDe.testDruidDeserializer (batchId=259)
org.apache.hadoop.hive.metastore.TestMetastoreVersion.testMetastoreVersion (batchId=225)
org.apache.hadoop.hive.metastore.TestMetastoreVersion.testVersionMatching (batchId=225)
org.apache.hadoop.hive.metastore.client.TestAppendPartitions.testAppendPartitionNullPartValues[Embedded] (batchId=207)
org.apache.hadoop.hive.metastore.client.TestAppendPartitions.testAppendPartitionNullPartValues[Remote] (batchId=207)
org.apache.hadoop.hive.ql.TestAcidOnTez.testGetSplitsLocks (batchId=226)
org.apache.hadoop.hive.ql.TestMTQueries.testMTQueries1 (batchId=229)
org.apache.hadoop.hive.ql.io.TestDruidRecordWriter.testWrite (batchId=259)
org.apache.hive.jdbc.TestJdbcWithMiniLlap.testLlapInputFormatEndToEnd (batchId=237)
{noformat}

Test results: https://builds.apache.org/job/PreCommit-HIVE-Build/9805/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/9805/console
Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-9805/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Executing org.apache.hive.ptest.execution.YetusPhase
Executing org.apache.hive.ptest.execution.ExecutionPhase
Executing org.apache.hive.ptest.execution.ReportingPhase
Tests exited with: TestsFailedException: 83 tests failed
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12915896 - PreCommit-HIVE-Build

> Race condition in Hbase tables
> ------------------------------
>
>                 Key: HIVE-17098
>                 URL: https://issues.apache.org/jira/browse/HIVE-17098
>             Project: Hive
>          Issue Type: Bug
>          Components: HBase Handler
>    Affects Versions: 2.1.1
>            Reporter: Oleksiy Sayankin
>            Assignee: Oleksiy Sayankin
>            Priority: Critical
>         Attachments: HIVE-17098.1.patch, HIVE-17098.2.patch
>
>
> These steps simulate our customer's production environment.
> *STEP 1. Create test tables*
> {code}
> CREATE TABLE for_loading(
>   key int, 
>   value string,
>   age int,
>   salary decimal (10,2)
> ) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
> {code}
> Table {{test_1}} belongs to user {{testuser1}}.
> {code}
> CREATE TABLE test_1(
>   key int, 
>   value string,
>   age int,
>   salary decimal (10,2)
> )
> ROW FORMAT SERDE 
>   'org.apache.hadoop.hive.hbase.HBaseSerDe' 
> STORED BY 
>   'org.apache.hadoop.hive.hbase.HBaseStorageHandler' 
> WITH SERDEPROPERTIES ( 
>   'hbase.columns.mapping'=':key, cf1:value, cf1:age, cf1:salary', 
>   'serialization.format'='1')
> TBLPROPERTIES (
>   'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
>   'hbase.table.name'='test_1', 
>   'numFiles'='0', 
>   'numRows'='0', 
>   'rawDataSize'='0', 
>   'totalSize'='0', 
>   'transient_lastDdlTime'='1495769316');
> {code}
> Table {{test_2}} belongs to user {{testuser2}}.
> {code}
> CREATE TABLE test_2(
>   key int, 
>   value string,
>   age int,
>   salary decimal (10,2)
> )
> ROW FORMAT SERDE 
>   'org.apache.hadoop.hive.hbase.HBaseSerDe' 
> STORED BY 
>   'org.apache.hadoop.hive.hbase.HBaseStorageHandler' 
> WITH SERDEPROPERTIES ( 
>   'hbase.columns.mapping'=':key, cf1:value, cf1:age, cf1:salary', 
>   'serialization.format'='1')
> TBLPROPERTIES (
>   'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 
>   'hbase.table.name'='test_2', 
>   'numFiles'='0', 
>   'numRows'='0', 
>   'rawDataSize'='0', 
>   'totalSize'='0', 
>   'transient_lastDdlTime'='1495769316');
> {code}
> *STEP 2. Create test data*
> {code}
> import java.io.IOException;
> import java.math.BigDecimal;
> import java.nio.charset.Charset;
> import java.nio.file.Files;
> import java.nio.file.Path;
> import java.nio.file.Paths;
> import java.nio.file.StandardOpenOption;
> import java.util.ArrayList;
> import java.util.Arrays;
> import java.util.List;
> import java.util.Random;
> import static java.lang.String.format;
>
> public class Generator {
>     private static List<String> lines = new ArrayList<>();
>     private static List<String> name = Arrays.asList("Brian", "John", "Rodger", "Max", "Freddie", "Albert", "Fedor", "Lev", "Niccolo");
>     private static List<BigDecimal> salary = new ArrayList<>();
>
>     public static void main(String[] args) {
>         generateData(Integer.parseInt(args[0]), args[1]);
>     }
>
>     // Writes rowNumber comma-separated records (key,value,age,salary) to the given file.
>     public static void generateData(int rowNumber, String file) {
>         double maxValue = 20000.55;
>         double minValue = 1000.03;
>         Random random = new Random();
>         for (int i = 1; i <= rowNumber; i++) {
>             lines.add(
>                 i + "," +
>                     name.get(random.nextInt(name.size())) + "," +
>                     (random.nextInt(62) + 18) + "," +
>                     format("%.2f", (minValue + (maxValue - minValue) * random.nextDouble())));
>         }
>         Path path = Paths.get(file);
>         try {
>             // CREATE is needed alongside APPEND so the first run does not fail when the file does not exist yet.
>             Files.write(path, lines, Charset.forName("UTF-8"), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
>         } catch (IOException e) {
>             e.printStackTrace();
>         }
>     }
> }
> {code}
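> Each generated line is a comma-separated record matching the {{for_loading}} schema (key, value, age, salary). An illustrative row (the actual values are random; this one is made up):
> {code}
> 1,Brian,34,15023.87
> {code}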
> {code}
> javac Generator.java
> java Generator 3000000 dataset.csv
> hadoop fs -put dataset.csv /
> {code}
> *STEP 3. Upload test data*
> {code}
> load data local inpath '/home/myuser/dataset.csv' into table for_loading;
> {code}
> {code}
> from for_loading
> insert into table test_1
> select key,value,age,salary;
> {code}
> {code}
> from for_loading
> insert into table test_2
> select key,value,age,salary;
> {code}
> *STEP 4. Run test queries*
> Run in 5 parallel terminals for table {{test_1}}
> {code}
> for i in {1..500}; do beeline -u "jdbc:hive2://localhost:10000/default testuser1" -e "select * from test_1 limit 10;" 1>/dev/null; done
> {code}
> Run in 5 parallel terminals for table {{test_2}}
> {code}
> for i in {1..500}; do beeline -u "jdbc:hive2://localhost:10000/default testuser2" -e "select * from test_2 limit 10;" 1>/dev/null; done
> {code}
> *EXPECTED RESULT:*
> All queries complete successfully.
> *ACTUAL RESULT*
> {code}
> org.apache.hive.service.cli.HiveSQLException: java.io.IOException: java.lang.IllegalStateException: The input format instance has not been properly initialized. Ensure you call initializeTable either in your constructor or initialize method
>         at org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:484)
>         at org.apache.hive.service.cli.operation.OperationManager.getOperationNextRowSet(OperationManager.java:308)
>         at org.apache.hive.service.cli.session.HiveSessionImpl.fetchResults(HiveSessionImpl.java:847)
>         at sun.reflect.GeneratedMethodAccessor8.invoke(Unknown Source)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:606)
>         at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
>         at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
>         at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at javax.security.auth.Subject.doAs(Subject.java:415)
>         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1595)
>         at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
>         at com.sun.proxy.$Proxy25.fetchResults(Unknown Source)
>         at org.apache.hive.service.cli.CLIService.fetchResults(CLIService.java:504)
>         at org.apache.hive.service.cli.thrift.ThriftCLIService.FetchResults(ThriftCLIService.java:698)
>         at org.apache.hive.service.rpc.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1717)
>         at org.apache.hive.service.rpc.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1702)
>         at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
>         at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
>         at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:56)
>         at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: java.io.IOException: java.lang.IllegalStateException: The input format instance has not been properly initialized. Ensure you call initializeTable either in your constructor or initialize method
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:521)
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:428)
>         at org.apache.hadoop.hive.ql.exec.FetchTask.fetch(FetchTask.java:146)
>         at org.apache.hadoop.hive.ql.Driver.getResults(Driver.java:2099)
>         at org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:479)
>         ... 24 more
> Caused by: java.lang.IllegalStateException: The input format instance has not been properly initialized. Ensure you call initializeTable either in your constructor or initialize method
>         at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getRegionLocator(TableInputFormatBase.java:579)
>         at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getStartEndKeys(TableInputFormatBase.java:225)
>         at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getSplits(TableInputFormatBase.java:261)
>         at org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat.getSplitsInternal(HiveHBaseTableInputFormat.java:525)
>         at org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat.getSplits(HiveHBaseTableInputFormat.java:452)
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextSplits(FetchOperator.java:372)
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.getRecordReader(FetchOperator.java:304)
>         at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:459)
>         ... 28 more
> {code}
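> The bottom of the trace shows {{TableInputFormatBase.getSplits}} running on an input format instance whose {{initializeTable}} has not been called, which is consistent with several concurrent fetches racing on a lazily initialized instance. Below is a minimal sketch of the kind of double-checked guard that removes such a check-then-act race; it is illustrative only (the class name is hypothetical, and this is not the actual HIVE-17098 patch):
> {code}
> import java.io.IOException;
> import java.util.List;
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.hbase.TableName;
> import org.apache.hadoop.hbase.client.ConnectionFactory;
> import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
> import org.apache.hadoop.mapreduce.InputSplit;
> import org.apache.hadoop.mapreduce.JobContext;
>
> // Hypothetical illustration only: ensures initializeTable runs exactly once before getSplits.
> public class GuardedTableInputFormat extends TableInputFormat {
>
>   private final Object initLock = new Object();
>   private volatile boolean initialized = false;
>
>   @Override
>   public List<InputSplit> getSplits(JobContext context) throws IOException {
>     Configuration conf = context.getConfiguration();
>     if (!initialized) {                  // fast path without taking the lock
>       synchronized (initLock) {
>         if (!initialized) {              // re-check under the lock so only one thread initializes
>           // initializeTable is the protected hook the IllegalStateException message refers to.
>           initializeTable(ConnectionFactory.createConnection(conf),
>               TableName.valueOf(conf.get(INPUT_TABLE)));
>           initialized = true;
>         }
>       }
>     }
>     return super.getSplits(context);     // the instance is now guaranteed to be initialized
>   }
> }
> {code}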



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
