[hive] branch branch-3 updated: HIVE-22105 Update ORC to 1.5.6 port to branch-3 (Owen O'Malley vi Alan Gates)
This is an automated email from the ASF dual-hosted git repository. gates pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new cea2d6b HIVE-22105 Update ORC to 1.5.6 port to branch-3 (Owen O'Malley vi Alan Gates) cea2d6b is described below commit cea2d6b30a2545d3432d787af29f37acaccfaac2 Author: Alan Gates AuthorDate: Tue Aug 20 15:24:20 2019 -0700 HIVE-22105 Update ORC to 1.5.6 port to branch-3 (Owen O'Malley vi Alan Gates) --- pom.xml| 2 +- .../hive/ql/io/orc/TestInputOutputFormat.java | 35 + ql/src/test/queries/clientpositive/orc_merge9.q| 8 - .../beeline/materialized_view_create_rewrite.q.out | 4 +- .../results/clientpositive/deleteAnalyze.q.out | 2 +- ...llowincompatible_vectorization_false_date.q.out | 1 - .../clientpositive/llap/default_constraint.q.out | 14 +- .../clientpositive/llap/deleteAnalyze.q.out| 2 +- .../llap/extrapolate_part_stats_partial_ndv.q.out | 36 ++--- .../llap/materialized_view_create.q.out| 6 +- .../llap/materialized_view_create_rewrite.q.out| 4 +- .../llap/materialized_view_create_rewrite_4.q.out | 6 +- .../llap/materialized_view_create_rewrite_5.q.out | 2 +- .../materialized_view_create_rewrite_dummy.q.out | 4 +- ...materialized_view_create_rewrite_multi_db.q.out | 4 +- ...erialized_view_create_rewrite_time_window.q.out | 6 +- ...ialized_view_create_rewrite_time_window_2.q.out | 6 +- .../llap/materialized_view_describe.q.out | 6 +- .../results/clientpositive/llap/orc_analyze.q.out | 34 ++-- .../clientpositive/llap/orc_llap_counters.q.out| 10 +- .../clientpositive/llap/orc_llap_counters1.q.out | 6 +- .../results/clientpositive/llap/orc_merge10.q.out | 4 +- .../results/clientpositive/llap/orc_merge11.q.out | 6 +- .../results/clientpositive/llap/orc_merge9.q.out | 50 -- .../clientpositive/llap/orc_ppd_basic.q.out| 14 +- .../llap/orc_ppd_schema_evol_3a.q.out | 68 .../results/clientpositive/orc_file_dump.q.out | 174 ++--- 
.../test/results/clientpositive/orc_merge10.q.out | 4 +- .../test/results/clientpositive/orc_merge11.q.out | 6 +- .../test/results/clientpositive/orc_merge12.q.out | 2 +- .../test/results/clientpositive/orc_merge9.q.out | 50 -- .../results/clientpositive/spark/orc_merge8.q.out | 10 +- .../results/clientpositive/spark/orc_merge9.q.out | 52 +- .../tez/acid_vectorization_original_tez.q.out | 24 +-- .../results/clientpositive/tez/orc_merge12.q.out | 2 +- 35 files changed, 243 insertions(+), 421 deletions(-) diff --git a/pom.xml b/pom.xml index d6175ff..96e0217 100644 --- a/pom.xml +++ b/pom.xml @@ -184,7 +184,7 @@ 0.9.3 2.10.0 2.3 -1.5.2 +1.5.6 1.10.19 1.7.4 2.0.0-M5 diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java index 208aeb5..326c7f6 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java @@ -3219,11 +3219,9 @@ public class TestInputOutputFormat { readOpsDelta = statistics.getReadOps() - readOpsBefore; } } -// call-1: open to read footer - split 1 => mock:/mocktable1/0_0 -// call-2: open to read data - split 1 => mock:/mocktable1/0_0 -// call-3: open to read footer - split 2 => mock:/mocktable1/0_1 -// call-4: open to read data - split 2 => mock:/mocktable1/0_1 -assertEquals(4, readOpsDelta); +// call-1: open to read - split 1 => mock:/mocktable1/0_0 +// call-2: open to read - split 2 => mock:/mocktable1/0_1 +assertEquals(2, readOpsDelta); // revert back to local fs conf.set("fs.defaultFS", "file:///"); @@ -3364,11 +3362,9 @@ public class TestInputOutputFormat { readOpsDelta = statistics.getReadOps() - readOpsBefore; } } -// call-1: open to read footer - split 1 => mock:/mocktable3/0_0 -// call-2: open to read data - split 1 => mock:/mocktable3/0_0 -// call-3: open to read footer - split 2 => mock:/mocktable3/0_1 -// call-4: open to read data - split 2 
=> mock:/mocktable3/0_1 -assertEquals(4, readOpsDelta); +// call-1: open to read - split 1 => mock:/mocktable3/0_0 +// call-2: open to read - split 2 => mock:/mocktable3/0_1 +assertEquals(2, readOpsDelta); // revert back to local fs conf.set("fs.defaultFS", "file:///"); @@ -3514,15 +3510,7 @@ public class TestInputOutputFormat { readOpsDelta = statistics.getReadOps() - readOpsBefore; } } -// call-1: open to read footer - split 1 => mock:/mocktable5/0_0 -// call-2: open to read data - split 1 =>
[hive] branch branch-3 updated: HIVE-22134: HIVE-22129: Remove glassfish.jersey and mssql-jdbc classes from jdbc-standalone jar (Naveen Gangam, reviewed by Thejas Nair)
This is an automated email from the ASF dual-hosted git repository. ngangam pushed a commit to branch branch-3 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3 by this push: new 02973f2 HIVE-22134: HIVE-22129: Remove glassfish.jersey and mssql-jdbc classes from jdbc-standalone jar (Naveen Gangam, reviewed by Thejas Nair) 02973f2 is described below commit 02973f25f791f9467dd171a7714affb1334a4fa2 Author: Naveen Gangam AuthorDate: Tue Aug 27 13:54:30 2019 -0400 HIVE-22134: HIVE-22129: Remove glassfish.jersey and mssql-jdbc classes from jdbc-standalone jar (Naveen Gangam, reviewed by Thejas Nair) --- jdbc/pom.xml | 86 +++- 1 file changed, 44 insertions(+), 42 deletions(-) diff --git a/jdbc/pom.xml b/jdbc/pom.xml index 587de2e..b0c2ef9 100644 --- a/jdbc/pom.xml +++ b/jdbc/pom.xml @@ -224,52 +224,19 @@ - org.apache.commons:commons-compress - org.apache.hadoop:hadoop-hdfs - org.apache.hadoop:hadoop-client - org.apache.hadoop:hadoop-hdfs-client - org.apache.hadoop:hadoop-mapreduce-client-jobclient - org.apache.hadoop:hadoop-mapreduce-client-common - org.apache.hadoop:hadoop-mapreduce-client-core - org.apache.hadoop:hadoop-yarn-client - org.apache.hadoop:hadoop-distcp - org.apache.hadoop:hadoop-yarn-server-resourcemanager - org.apache.hadoop:hadoop-yarn-common - org.apache.hadoop:hadoop-yarn-server-common - org.apache.hadoop:hadoop-yarn-server-applicationhistoryservice - org.apache.hadoop:hadoop-yarn-server-web-proxy - org.apache.hadoop:hadoop-annotations - org.apache.hadoop:hadoop-auth - org.apache.hbase*:* - org.apache.hive:hive-vector-code-gen - org.apache.ant:* - junit:* - org.hamcrest:* - org.ow2.asm:* - javax.jms:* - com.sun.jersey:* - com.sun.jersey.contribs:* - org.eclipse.jetty.aggregate:* - org.tukaani:* - io.airlift:* - io.dropwizard.metrics:* - org.apache.velocity:* - net.sf.jpam:* - org.apache.avro:* - org.apache.orc:* - net.sf.opencsv:* - org.antlr:* - org.slf4j:slf4j-log4j12 - log4j:* antlr:* 
aopalliance:* asm:* + com.fasterxml.jackson.core:* com.google.code.gson:* + com.google.code.findbugs:* com.google.inject:* com.google.inject.extensions:* + com.google.protobuf:* com.jamesmurty.utils:* com.jcraft:* com.jolbox:* + com.microsoft.sqlserver:mssql-jdbc commons-beanutils:* commons-cli:* commons-dbcp:* @@ -279,17 +246,20 @@ commons-io:* commons-net:* commons-pool:* - com.google.code.findbugs:* - com.google.protobuf:* + com.sun.jersey:* + com.sun.jersey.contribs:* com.sun.xml.bind:* + com.tdunning:* com.thoughtworks.paranamer:* com.twitter:* com.zaxxer:* - com.fasterxml.jackson.core:* + io.airlift:* + io.dropwizard.metrics:* io.netty:* javax.activation:* javax.inject:* javax.jdo:* + javax.jms:* javax.mail:* javax.servlet:* javax.servlet.jsp:* @@ -297,24 +267,56 @@ javax.xml.*:* jline:* joda-time:* + junit:* + log4j:* net.java.dev.jets3t:* + net.sf.jpam:* + net.sf.opencsv:* + org.antlr:* + org.apache.ant:* + org.apache.avro:* +
[hive] branch branch-3.1 updated: Preparing for 3.1.3 development.
This is an automated email from the ASF dual-hosted git repository. gates pushed a commit to branch branch-3.1 in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/branch-3.1 by this push: new ee48c29 Preparing for 3.1.3 development. ee48c29 is described below commit ee48c29b1b2de401e9243c413ebdb92d3ee4e5f8 Author: Alan Gates AuthorDate: Tue Aug 27 10:19:34 2019 -0700 Preparing for 3.1.3 development. --- accumulo-handler/pom.xml | 2 +- beeline/pom.xml | 2 +- classification/pom.xml | 2 +- cli/pom.xml | 2 +- common/pom.xml | 2 +- contrib/pom.xml | 2 +- druid-handler/pom.xml| 2 +- hbase-handler/pom.xml| 2 +- hcatalog/core/pom.xml| 2 +- hcatalog/hcatalog-pig-adapter/pom.xml| 2 +- hcatalog/pom.xml | 2 +- hcatalog/server-extensions/pom.xml | 2 +- hcatalog/streaming/pom.xml | 2 +- hcatalog/webhcat/java-client/pom.xml | 2 +- hcatalog/webhcat/svr/pom.xml | 2 +- hplsql/pom.xml | 2 +- itests/custom-serde/pom.xml | 2 +- itests/custom-udfs/pom.xml | 2 +- itests/custom-udfs/udf-classloader-udf1/pom.xml | 2 +- itests/custom-udfs/udf-classloader-udf2/pom.xml | 2 +- itests/custom-udfs/udf-classloader-util/pom.xml | 2 +- itests/custom-udfs/udf-vectorized-badexample/pom.xml | 2 +- itests/hcatalog-unit/pom.xml | 2 +- itests/hive-blobstore/pom.xml| 2 +- itests/hive-jmh/pom.xml | 2 +- itests/hive-minikdc/pom.xml | 2 +- itests/hive-unit-hadoop2/pom.xml | 2 +- itests/hive-unit/pom.xml | 2 +- itests/pom.xml | 4 ++-- itests/qtest-accumulo/pom.xml| 2 +- itests/qtest-druid/pom.xml | 2 +- itests/qtest-spark/pom.xml | 2 +- itests/qtest/pom.xml | 2 +- itests/test-serde/pom.xml| 2 +- itests/util/pom.xml | 2 +- jdbc-handler/pom.xml | 2 +- jdbc/pom.xml | 2 +- kryo-registrator/pom.xml | 2 +- llap-client/pom.xml | 2 +- llap-common/pom.xml | 2 +- llap-ext-client/pom.xml | 2 +- llap-server/pom.xml | 2 +- llap-tez/pom.xml | 2 +- metastore/pom.xml| 2 +- packaging/pom.xml| 2 +- pom.xml | 2 +- ql/pom.xml | 2 +- serde/pom.xml| 2 +- service-rpc/pom.xml | 
2 +- service/pom.xml | 2 +- shims/0.23/pom.xml | 2 +- shims/aggregator/pom.xml | 2 +- shims/common/pom.xml | 2 +- shims/pom.xml| 2 +- shims/scheduler/pom.xml | 2 +- spark-client/pom.xml | 4 ++-- standalone-metastore/pom.xml | 2 +- streaming/pom.xml| 2 +- testutils/pom.xml| 2 +- upgrade-acid/pom.xml | 2 +- vector-code-gen/pom.xml | 2 +- 61 files changed, 63 insertions(+), 63 deletions(-) diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml index c3254c3..71f7a39 100644 --- a/accumulo-handler/pom.xml +++ b/accumulo-handler/pom.xml @@ -19,7 +19,7 @@ org.apache.hive hive -3.1.2 +3.1.3-SNAPSHOT ../pom.xml diff --git a/beeline/pom.xml b/beeline/pom.xml index 14a625a..1c1b400 100644 --- a/beeline/pom.xml +++ b/beeline/pom.xml @@ -19,7 +19,7 @@ org.apache.hive hive -3.1.2 +3.1.3-SNAPSHOT ../pom.xml diff --git a/classification/pom.xml b/classification/pom.xml index cdf456f..4d19d84 100644 --- a/classification/pom.xml +++ b/classification/pom.xml @@ -19,7 +19,7 @@ org.apache.hive hive -3.1.2 +3.1.3-SNAPSHOT ../pom.xml
svn commit: r1049276 - in /websites/production/hive/content: ./ javadocs/
Author: gates Date: Tue Aug 27 16:59:08 2019 New Revision: 1049276 Log: Hive 3.1.2 release Added: websites/production/hive/content/ - copied from r1049275, websites/staging/hive/trunk/content/ websites/production/hive/content/javadocs/ - copied from r1049275, websites/production/hive/content/javadocs/
svn commit: r1049275 - in /websites/staging/hive/trunk/content: ./ downloads.html javadoc.html
Author: buildbot Date: Tue Aug 27 16:56:10 2019 New Revision: 1049275 Log: Staging update by buildbot for hive Modified: websites/staging/hive/trunk/content/ (props changed) websites/staging/hive/trunk/content/downloads.html websites/staging/hive/trunk/content/javadoc.html Propchange: websites/staging/hive/trunk/content/ -- --- cms:source-revision (original) +++ cms:source-revision Tue Aug 27 16:56:10 2019 @@ -1 +1 @@ -1865792 +1865996 Modified: websites/staging/hive/trunk/content/downloads.html == --- websites/staging/hive/trunk/content/downloads.html (original) +++ websites/staging/hive/trunk/content/downloads.html Tue Aug 27 16:56:10 2019 @@ -124,6 +124,9 @@ h2:hover > .headerlink, h3:hover > .head guaranteed to be stable. For stable releases, look in the stable directory. News +26 August 2019: release 3.1.2 available +This release works with Hadoop 3.x.y. +You can look at the complete https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12344397&styleName=Html&projectId=12310843">JIRA change log for this release. 23 August 2019: release 2.3.6 available This release works with Hadoop 2.x.y. You can look at the complete https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12345603&styleName=Text&projectId=12310843">JIRA change log for this release. Modified: websites/staging/hive/trunk/content/javadoc.html == --- websites/staging/hive/trunk/content/javadoc.html (original) +++ websites/staging/hive/trunk/content/javadoc.html Tue Aug 27 16:56:10 2019 @@ -120,7 +120,7 @@ h2:hover > .headerlink, h3:hover > .head h2:hover > .headerlink, h3:hover > .headerlink, h1:hover > .headerlink, h6:hover > .headerlink, h4:hover > .headerlink, h5:hover > .headerlink, dt:hover > .elementid-permalink { visibility: visible } Recent versions: -Hive 3.1.1 Javadocs +Hive 3.1.2 Javadocs Hive 3.0.0 Javadocs Hive 2.3.6 Javadocs Hive 2.2.0 Javadocs
svn commit: r1865996 - in /hive/cms/trunk/content: downloads.mdtext javadoc.mdtext
Author: gates Date: Tue Aug 27 16:56:03 2019 New Revision: 1865996 URL: http://svn.apache.org/viewvc?rev=1865996&view=rev Log: Hive website update for 3.1.2 release. Modified: hive/cms/trunk/content/downloads.mdtext hive/cms/trunk/content/javadoc.mdtext Modified: hive/cms/trunk/content/downloads.mdtext URL: http://svn.apache.org/viewvc/hive/cms/trunk/content/downloads.mdtext?rev=1865996&r1=1865995&r2=1865996&view=diff == --- hive/cms/trunk/content/downloads.mdtext (original) +++ hive/cms/trunk/content/downloads.mdtext Tue Aug 27 16:56:03 2019 @@ -11,6 +11,10 @@ directory. ## News +### 26 August 2019: release 3.1.2 available +This release works with Hadoop 3.x.y. +You can look at the complete [JIRA change log for this release][HIVE_3_1_2_CL]. + ### 23 August 2019: release 2.3.6 available This release works with Hadoop 2.x.y. You can look at the complete [JIRA change log for this release][HIVE_2_3_6_CL]. @@ -149,6 +153,7 @@ This release works with Hadoop 0.20.x, You can look at the complete [JIRA change log for this release][HIVE_10_CL].
[HIVE_DL]: http://www.apache.org/dyn/closer.cgi/hive/ +[HIVE_3_1_2_CL]: https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12344397&styleName=Html&projectId=12310843 [HIVE_2_3_6_CL]: https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12345603&styleName=Text&projectId=12310843 [HIVE_2_3_5_CL]: https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12345394&styleName=Text&projectId=12310843 [HIVE_2_3_4_CL]: https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12344319&styleName=Text&projectId=12310843 Modified: hive/cms/trunk/content/javadoc.mdtext URL: http://svn.apache.org/viewvc/hive/cms/trunk/content/javadoc.mdtext?rev=1865996&r1=1865995&r2=1865996&view=diff == --- hive/cms/trunk/content/javadoc.mdtext (original) +++ hive/cms/trunk/content/javadoc.mdtext Tue Aug 27 16:56:03 2019 @@ -2,7 +2,7 @@ Title: Javadoc Recent versions: - * [Hive 3.1.1 Javadocs][r3.1.1] + * [Hive 3.1.2 Javadocs][r3.1.2] * [Hive 3.0.0 Javadocs][r3.0.0] * [Hive 2.3.6 Javadocs][r2.3.6] * [Hive 2.2.0 Javadocs][r2.2.0] @@ -18,7 +18,7 @@ Archived versions: javadoc and sources jars for use in an IDE are also available via [Nexus][] -[r3.1.1]: /javadocs/r3.1.1/api/index.html +[r3.1.2]: /javadocs/r3.1.2/api/index.html [r3.0.0]: /javadocs/r3.0.0/api/index.html [r2.3.6]: /javadocs/r2.3.6/api/index.html [r2.2.0]: /javadocs/r2.2.0/api/index.html
[hive] branch master updated: HIVE-21944: Remove unused methods, fields and variables from Vectorizer (Ivan Suller via Laszlo Bodor)
This is an automated email from the ASF dual-hosted git repository. abstractdog pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new d26516e HIVE-21944: Remove unused methods, fields and variables from Vectorizer (Ivan Suller via Laszlo Bodor) d26516e is described below commit d26516e7e18d2a729a681ee23f5e13c7282af94e Author: Ivan Suller AuthorDate: Tue Aug 27 13:53:22 2019 +0200 HIVE-21944: Remove unused methods, fields and variables from Vectorizer (Ivan Suller via Laszlo Bodor) Signed-off-by: Laszlo Bodor --- .../hive/ql/optimizer/physical/Vectorizer.java | 71 ++ 1 file changed, 6 insertions(+), 65 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index ed3db4c..b650299 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -104,9 +104,6 @@ import org.apache.hadoop.hive.ql.io.ZeroRowsInputFormat; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.lib.Node; -import org.apache.hadoop.hive.ql.lib.NodeProcessor; -import org.apache.hadoop.hive.ql.lib.Rule; -import org.apache.hadoop.hive.ql.lib.RuleRegExp; import org.apache.hadoop.hive.ql.lib.TaskGraphWalker; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; @@ -312,9 +309,9 @@ public class Vectorizer implements PhysicalPlanResolver { }; } - boolean isVectorizationEnabled; + private boolean isVectorizationEnabled; private EnabledOverride vectorizationEnabledOverride; - boolean isTestForcedVectorizationEnable; + private boolean isTestForcedVectorizationEnable; private boolean useVectorizedInputFileFormat; private 
boolean useVectorDeserialize; @@ -858,10 +855,7 @@ public class Vectorizer implements PhysicalPlanResolver { VectorTaskColumnInfo vectorTaskColumnInfo) throws VectorizerCannotVectorizeException { -List> vectorChildren = newOperatorList(); List> children = parent.getChildOperators(); -List>> listOfChildMultipleParents = -new ArrayList>>(); final int childrenCount = children.size(); for (int i = 0; i < childrenCount; i++) { @@ -977,7 +971,6 @@ public class Vectorizer implements PhysicalPlanResolver { vContext = ((VectorizationOperator) vectorParent).getInputVectorizationContext(); } -OperatorDesc desc = child.getConf(); Operator vectorChild; try { @@ -1193,13 +1186,6 @@ public class Vectorizer implements PhysicalPlanResolver { validateAndVectorizeMapWork(mapWork, vectorTaskColumnInfo, isTezOrSpark); } -private void addMapWorkRules(Map opRules, NodeProcessor np) { - opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + ".*" - + FileSinkOperator.getOperatorName()), np); - opRules.put(new RuleRegExp("R2", TableScanOperator.getOperatorName() + ".*" - + ReduceSinkOperator.getOperatorName()), np); -} - /* * Determine if there is only one TableScanOperator. Currently in Map vectorization, we do not * try to vectorize multiple input trees. 
@@ -1232,7 +1218,7 @@ public class Vectorizer implements PhysicalPlanResolver { setNodeIssue("Vectorized map work only works with 1 TableScanOperator"); return null; } - return new ImmutablePair(alias, tableScanOperator); + return new ImmutablePair<>(alias, tableScanOperator); } private void getTableScanOperatorSchemaInfo(TableScanOperator tableScanOperator, @@ -2051,8 +2037,6 @@ public class Vectorizer implements PhysicalPlanResolver { vectorTaskColumnInfo.transferToBaseWork(mapWork); mapWork.setVectorMode(true); - - return; } private boolean validateAndVectorizeMapOperators(MapWork mapWork, TableScanOperator tableScanOperator, @@ -2227,8 +2211,6 @@ public class Vectorizer implements PhysicalPlanResolver { private void validateAndVectorizeReduceWork(ReduceWork reduceWork, VectorTaskColumnInfo vectorTaskColumnInfo) throws SemanticException { - Operator reducer = reduceWork.getReducer(); - // Validate input to ReduceWork. if (!getOnlyStructObjectInspectors(reduceWork, vectorTaskColumnInfo)) { return; @@ -2613,18 +2595,6 @@ public class Vectorizer implements PhysicalPlanResolver { return true; } - private boolean validateSparkHashTableSinkOperator(SparkHashTableSinkOperator op) { -SparkHashTableSinkDesc desc = op.getConf(); -byte tag = desc.getTag(); -// it's essentially a MapJoinDesc -List filterExprs =
[hive] branch master updated: HIVE-21962: Replacing ArrayList params with List in and around PlanUtils and MapWork (Ivan Suller via Zoltan Haindrich, Laszlo Bodor)
This is an automated email from the ASF dual-hosted git repository. abstractdog pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 91f9ff5 HIVE-21962: Replacing ArrayList params with List in and around PlanUtils and MapWork (Ivan Suller via Zoltan Haindrich, Laszlo Bodor) 91f9ff5 is described below commit 91f9ff5648854c487860a79b82450538fbcf6c48 Author: Ivan Suller AuthorDate: Tue Aug 27 13:39:06 2019 +0200 HIVE-21962: Replacing ArrayList params with List in and around PlanUtils and MapWork (Ivan Suller via Zoltan Haindrich, Laszlo Bodor) Signed-off-by: Laszlo Bodor --- .../hive/ql/optimizer/ColumnPrunerProcFactory.java | 5 ++- .../hive/ql/optimizer/ConvertJoinMapJoin.java | 6 ++-- .../hive/ql/optimizer/ReduceSinkMapJoinProc.java | 3 +- .../correlation/ReduceSinkDeDuplicationUtils.java | 2 +- .../hive/ql/optimizer/lineage/OpProcFactory.java | 4 +-- .../physical/BucketingSortingOpProcFactory.java| 2 +- .../hive/ql/optimizer/physical/Vectorizer.java | 17 - .../apache/hadoop/hive/ql/plan/FileSinkDesc.java | 9 +++-- .../org/apache/hadoop/hive/ql/plan/MapWork.java| 4 +-- .../org/apache/hadoop/hive/ql/plan/PlanUtils.java | 23 ++-- .../apache/hadoop/hive/ql/plan/ReduceSinkDesc.java | 42 +++--- .../hive/ql/plan/VectorizationCondition.java | 3 +- 12 files changed, 58 insertions(+), 62 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java index d5f51bf..697d3b2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java @@ -575,7 +575,7 @@ public final class ColumnPrunerProcFactory { ReduceSinkDesc conf = op.getConf(); List colLists = new ArrayList<>(); - ArrayList keys = conf.getKeyCols(); + List keys = conf.getKeyCols(); 
LOG.debug("Reduce Sink Operator " + op.getIdentifier() + " key:" + keys); for (ExprNodeDesc key : keys) { colLists = mergeFieldNodesWithDesc(colLists, key); @@ -874,8 +874,7 @@ public final class ColumnPrunerProcFactory { private static boolean[] getPruneReduceSinkOpRetainFlags( List retainedParentOpOutputCols, ReduceSinkOperator reduce) { ReduceSinkDesc reduceConf = reduce.getConf(); -java.util.ArrayList originalValueEval = reduceConf -.getValueCols(); +List originalValueEval = reduceConf.getValueCols(); boolean[] flags = new boolean[originalValueEval.size()]; for (int i = 0; i < originalValueEval.size(); i++) { flags[i] = false; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java index b4cc76a..de61be8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java @@ -639,7 +639,7 @@ public class ConvertJoinMapJoin implements NodeProcessor { ReduceSinkOperator bigTableRS = (ReduceSinkOperator)joinOp.getParentOperators().get(bigTablePosition); OpTraits opTraits = bigTableRS.getOpTraits(); List> listBucketCols = opTraits.getBucketColNames(); -ArrayList bigTablePartitionCols = bigTableRS.getConf().getPartitionCols(); +List bigTablePartitionCols = bigTableRS.getConf().getPartitionCols(); boolean updatePartitionCols = false; List positions = new ArrayList<>(); @@ -691,8 +691,8 @@ public class ConvertJoinMapJoin implements NodeProcessor { } ReduceSinkOperator rsOp = (ReduceSinkOperator) op; -ArrayList newPartitionCols = new ArrayList<>(); -ArrayList partitionCols = rsOp.getConf().getPartitionCols(); +List newPartitionCols = new ArrayList<>(); +List partitionCols = rsOp.getConf().getPartitionCols(); for (Integer position : positions) { newPartitionCols.add(partitionCols.get(position)); } diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java index 81684be..89b5500 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ReduceSinkMapJoinProc.java @@ -177,7 +177,7 @@ public class ReduceSinkMapJoinProc implements NodeProcessor { keyCount = rowCount = Long.MAX_VALUE; } tableSize = stats.getDataSize(); - ArrayList keyCols = parentRS.getConf().getOutputKeyColumnNames(); + List keyCols = parentRS.getConf().getOutputKeyColumnNames(); if (keyCols != null && !keyCols.isEmpty()) { //
[hive] branch master updated: HIVE-22140: Metrics: unify codahale metric frequency unit between metastore and hiveserver2 (Laszlo Bodor reviewed by Zoltan Haindrich)
This is an automated email from the ASF dual-hosted git repository. abstractdog pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git The following commit(s) were added to refs/heads/master by this push: new 96b77b2 HIVE-22140: Metrics: unify codahale metric frequency unit between metastore and hiveserver2 (Laszlo Bodor reviewed by Zoltan Haindrich) 96b77b2 is described below commit 96b77b24a8bc45c041bf965b2a16b3e077dded7f Author: Laszlo Bodor AuthorDate: Tue Aug 27 13:09:39 2019 +0200 HIVE-22140: Metrics: unify codahale metric frequency unit between metastore and hiveserver2 (Laszlo Bodor reviewed by Zoltan Haindrich) Signed-off-by: Laszlo Bodor --- .../main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index 927324e..1d5a771 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -690,7 +690,7 @@ public class MetastoreConf { METRICS_HADOOP2_COMPONENT_NAME("metastore.metrics.hadoop2.component", "hive.service.metrics.hadoop2.component", "hivemetastore", "Component name to provide to Hadoop2 Metrics system."), METRICS_JSON_FILE_INTERVAL("metastore.metrics.file.frequency", -"hive.service.metrics.file.frequency", 1, TimeUnit.MINUTES, +"hive.service.metrics.file.frequency", 60000, TimeUnit.MILLISECONDS, "For json metric reporter, the frequency of updating JSON metrics file."), METRICS_JSON_FILE_LOCATION("metastore.metrics.file.location", "hive.service.metrics.file.location", "/tmp/report.json",