This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
commit 3fb2b3883228ef7b20e515775fd9870b853ea627
Author: Zoltan Haindrich <[email protected]>
AuthorDate: Tue Oct 1 11:21:58 2019 +0000

    HIVE-22245: Make qtest feature parser reuseable (Zoltan Haindrich reviewed by László Bodor)

    Signed-off-by: Zoltan Haindrich <[email protected]>
---
 .../hadoop/hive/cli/control/CoreBeeLineDriver.java | 16 +++--
 .../hive/cli/control/CoreCompareCliDriver.java | 2 +-
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java | 10 ++-
 .../hadoop/hive/ql/dataset/DatasetCollection.java | 9 +++
 .../hadoop/hive/ql/dataset/DatasetParser.java | 73 ---------------------
 .../hive/ql/dataset/QTestDatasetHandler.java | 65 +++++++++++--------
 .../hive/ql/qoption/QTestOptionDispatcher.java | 75 ++++++++++++++++++++++
 .../QTestOptionHandler.java} | 28 ++------
 .../hadoop/hive/ql/dataset/TestDatasetParser.java | 58 -----------------
 9 files changed, 146 insertions(+), 190 deletions(-)

diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
index 99748bd..c8239a7 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
@@ -39,8 +39,9 @@ import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
 import org.apache.hadoop.hive.ql.dataset.Dataset;
 import org.apache.hadoop.hive.ql.dataset.DatasetCollection;
-import org.apache.hadoop.hive.ql.dataset.DatasetParser;
+import org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler;
 import org.apache.hadoop.hive.ql.hooks.PreExecutePrinter;
+import org.apache.hadoop.hive.ql.qoption.QTestOptionDispatcher;
 import org.apache.hive.beeline.ConvertedOutputFile.Converter;
 import org.apache.hive.beeline.QFile;
 import org.apache.hive.beeline.QFile.QFileBuilder;
@@ -241,9 +242,9 @@ public class CoreBeeLineDriver extends CliAdapter {
       throw new Exception("Exception running or analyzing the results of the query file: "
           + qFile + "\n" + qFile.getDebugHint(), e);
     }
-
+
   }
-
+
   public void runTest(QFile qFile) throws Exception {
     runTest(qFile, null);
   }
@@ -264,12 +265,15 @@
   }

   private List<Callable<Void>> initDataSetForTest(QFile qFile) throws Exception {
-    DatasetParser parser = new DatasetParser();
-    parser.parse(qFile.getInputFile());
+
+    QTestOptionDispatcher dispatcher = new QTestOptionDispatcher();
+    QTestDatasetHandler datasetHandler = new QTestDatasetHandler(miniHS2.getHiveConf());
+    dispatcher.register("dataset", datasetHandler);
+    dispatcher.process(qFile.getInputFile());

     List<Callable<Void>> commands = new ArrayList<>();

-    DatasetCollection datasets = parser.getDatasets();
+    DatasetCollection datasets = datasetHandler.getDatasets();
     for (String table : datasets.getTables()) {
       Callable<Void> command = initDataset(table, qFile);
       if (command != null) {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
index a2aa8bf..d6bdb77 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
@@ -142,7 +142,7 @@ public class CoreCompareCliDriver extends CliAdapter{
     for (String versionFile : versionFiles) {
       // 1 for "_" after tname; 3 for ".qv" at the end. Version is in between.
       String versionStr = versionFile.substring(tname.length() + 1, versionFile.length() - 3);
-      outputs.add(qt.cliInit(new File(queryDirectory, tname + "." + versionStr)));
+      outputs.add(qt.cliInit(new File(queryDirectory, versionFile)));
       // TODO: will this work?
       CommandProcessorResponse response = qt.executeClient(versionFile, fname);
       if (response.getResponseCode() != 0) {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index d2c2ccd..c58d9f6 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -75,6 +75,7 @@ import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.processors.HiveCommand;
+import org.apache.hadoop.hive.ql.qoption.QTestOptionDispatcher;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Assert;
 import org.slf4j.Logger;
@@ -118,7 +119,7 @@ public class QTestUtil {

   private boolean isSessionStateStarted = false;

-  protected CliDriver getCliDriver() {
+  public CliDriver getCliDriver() {
     if (cliDriver == null) {
       throw new RuntimeException("no clidriver");
     }
@@ -199,7 +200,7 @@ public class QTestUtil {

     initConf();

-    datasetHandler = new QTestDatasetHandler(this, conf);
+    datasetHandler = new QTestDatasetHandler(conf);
     testFiles = datasetHandler.getDataDir(conf);
     conf.set("test.data.dir", datasetHandler.getDataDir(conf));

@@ -541,7 +542,10 @@ public class QTestUtil {
   public String cliInit(File file) throws Exception {
     String fileName = file.getName();

-    datasetHandler.initDataSetForTest(file, getCliDriver());
+    QTestOptionDispatcher dispatcher = new QTestOptionDispatcher();
+    dispatcher.register("dataset", datasetHandler);
+    dispatcher.process(file);
+    dispatcher.beforeTest(this);

     if (qTestResultProcessor.shouldNotReuseSession(fileName)) {
       newSession(false);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetCollection.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetCollection.java
index cf3ebd1..606e87f 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetCollection.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetCollection.java
@@ -28,6 +28,15 @@ import java.util.stream.Collectors;
 public class DatasetCollection {
   private Set<Dataset> coll = new HashSet<Dataset>();

+  public DatasetCollection() {
+  }
+
+  public DatasetCollection(Set<String> datasets) {
+    for (String datasetName : datasets) {
+      add(datasetName);
+    }
+  }
+
   public void add(Dataset dataset) {
     coll.add(dataset);
   }
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetParser.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetParser.java
deleted file mode 100644
index ea0b0c5..0000000
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetParser.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.dataset;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * DatasetParser: a parser which could parse dataset "hooks" from q files, --!qt:dataset:mydataset
- */
-public class DatasetParser {
-
-  private DatasetCollection datasets = new DatasetCollection();
-  private static final Logger LOG = LoggerFactory.getLogger("DatasetParser");
-
-  public static final String DATASET_PREFIX = "--! qt:dataset:";
-
-  public void parse(File file) {
-    try (BufferedReader br = new BufferedReader(new FileReader(file))) {
-      for (String line = br.readLine(); line != null; line = br.readLine()) {
-        if (line.trim().startsWith(DATASET_PREFIX)) {
-          Set<String> strDatasets = parseDatasetsFromLine(line);
-
-          for (String strDataset : strDatasets) {
-            datasets.add(strDataset);
-          }
-        }
-      }
-    } catch (IOException e) {
-      LOG.debug(
-          String.format("io exception while searching for datasets in qfile: %s", e.getMessage()));
-    }
-  }
-
-  public DatasetCollection getDatasets() {
-    return datasets;
-  }
-
-  public static Set<String> parseDatasetsFromLine(String input) {
-    Set<String> datasets = new HashSet<String>();
-
-    input = input.substring(DATASET_PREFIX.length());
-    if (!input.trim().isEmpty()) {
-      datasets.addAll(Arrays.asList(input.split(",")));
-    }
-
-    return datasets;
-  }
-
-}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/QTestDatasetHandler.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/QTestDatasetHandler.java
index 14560e4..ca3c277 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/QTestDatasetHandler.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/QTestDatasetHandler.java
@@ -29,24 +29,24 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QTestSystemProperties;
 import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.qoption.QTestOptionHandler;
 import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-public class QTestDatasetHandler {
+public class QTestDatasetHandler implements QTestOptionHandler {
   private static final Logger LOG = LoggerFactory.getLogger("QTestDatasetHandler");

   private File datasetDir;
-  private QTestUtil qt;
   private static Set<String> srcTables;
+  private Set<String> missingTables = new HashSet<>();

-  public QTestDatasetHandler(QTestUtil qTestUtil, HiveConf conf) {
+  public QTestDatasetHandler(HiveConf conf) {
     // Use path relative to dataDir directory if it is not specified
     String dataDir = getDataDir(conf);

     datasetDir = conf.get("test.data.set.files") == null ? new File(dataDir + "/datasets")
        : new File(conf.get("test.data.set.files"));
-    this.qt = qTestUtil;
   }

   public String getDataDir(HiveConf conf) {
@@ -59,28 +59,6 @@ public class QTestDatasetHandler {
     return dataDir;
   }

-  public void initDataSetForTest(File file, CliDriver cliDriver) throws Exception {
-    synchronized (QTestUtil.class) {
-      DatasetParser parser = new DatasetParser();
-      parser.parse(file);
-
-      DatasetCollection datasets = parser.getDatasets();
-
-      Set<String> missingDatasets = datasets.getTables();
-      missingDatasets.removeAll(getSrcTables());
-      if (missingDatasets.isEmpty()) {
-        return;
-      }
-      qt.newSession(true);
-      for (String table : missingDatasets) {
-        if (initDataset(table, cliDriver)) {
-          addSrcTable(table);
-        }
-      }
-      qt.newSession(true);
-    }
-  }
-
   public boolean initDataset(String table, CliDriver cliDriver) throws Exception {
     File tableFile = new File(new File(datasetDir, table), Dataset.INIT_FILE_NAME);
     String commands = null;
@@ -139,4 +117,39 @@
       }
     }
   }
+
+  @Override
+  public void processArguments(String arguments) {
+    String[] tables = arguments.split(",");
+    for (String string : tables) {
+      string = string.trim();
+      if(string.length()==0) {
+        continue;
+      }
+      if (srcTables == null || !srcTables.contains(string)) {
+        missingTables.add(string);
+      }
+    }
+  }
+
+  @Override
+  public void beforeTest(QTestUtil qt) throws Exception {
+    if (!missingTables.isEmpty()) {
+      synchronized (QTestUtil.class) {
+        qt.newSession(true);
+        for (String table : missingTables) {
+          if (initDataset(table, qt.getCliDriver())) {
+            addSrcTable(table);
+          }
+        }
+        missingTables.clear();
+        qt.newSession(true);
+      }
+    }
+  }
+
+  public DatasetCollection getDatasets() {
+    return new DatasetCollection(missingTables);
+  }
+
 }
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestOptionDispatcher.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestOptionDispatcher.java
new file mode 100644
index 0000000..84a41b3
--- /dev/null
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestOptionDispatcher.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.qoption;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.hive.ql.QTestUtil;
+
+public class QTestOptionDispatcher {
+
+  Map<String, QTestOptionHandler> handlers = new HashMap<String, QTestOptionHandler>();
+
+  public void register(String prefix, QTestOptionHandler datasetHandler) {
+    if (handlers.containsKey(prefix)) {
+      throw new RuntimeException();
+    }
+    handlers.put(prefix, datasetHandler);
+  }
+
+  public void process(File file) {
+    synchronized (QTestUtil.class) {
+      parse(file);
+    }
+  }
+
+  public void parse(File file) {
+    Pattern p = Pattern.compile(" *--! ?qt:([a-z]+):?(.*)");
+
+    try (BufferedReader br = new BufferedReader(new FileReader(file))) {
+      for (String line = br.readLine(); line != null; line = br.readLine()) {
+        String l = line.trim();
+        Matcher m = p.matcher(l);
+        if (m.matches()) {
+          String sub = m.group(1);
+          String arguments = m.group(2);
+          if (!handlers.containsKey(sub)) {
+            throw new IOException("Don't know how to handle " + sub + " line: " + l);
+          }
+          handlers.get(sub).processArguments(arguments);
+        }
+      }
+    } catch (IOException e) {
+      throw new RuntimeException("Error while processing file: " + file, e);
+    }
+  }
+
+  public void beforeTest(QTestUtil qt) throws Exception {
+    for (QTestOptionHandler h : handlers.values()) {
+      h.beforeTest(qt);
+    }
+  }
+
+}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetCollection.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestOptionHandler.java
similarity index 57%
copy from itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetCollection.java
copy to itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestOptionHandler.java
index cf3ebd1..b67097f 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/dataset/DatasetCollection.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/qoption/QTestOptionHandler.java
@@ -15,32 +15,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.hadoop.hive.ql.qoption;

-package org.apache.hadoop.hive.ql.dataset;
+import org.apache.hadoop.hive.ql.QTestUtil;

-import java.util.HashSet;
-import java.util.Set;
-import java.util.stream.Collectors;
+public interface QTestOptionHandler {

-/**
- * DatasetCollection: utility wrapper class for a set of datasets
- */
-public class DatasetCollection {
-  private Set<Dataset> coll = new HashSet<Dataset>();
-
-  public void add(Dataset dataset) {
-    coll.add(dataset);
-  }
-
-  public void add(String table) {
-    add(new Dataset(table));
-  }
+  void processArguments(String arguments);

-  public Set<Dataset> getDatasets() {
-    return coll;
-  }
+  void beforeTest(QTestUtil qt) throws Exception;

-  public Set<String> getTables() {
-    return coll.stream().map(d -> d.getTable()).collect(Collectors.toSet());
-  }
 }
diff --git a/itests/util/src/test/java/org/apache/hadoop/hive/ql/dataset/TestDatasetParser.java b/itests/util/src/test/java/org/apache/hadoop/hive/ql/dataset/TestDatasetParser.java
deleted file mode 100644
index 3668a67..0000000
--- a/itests/util/src/test/java/org/apache/hadoop/hive/ql/dataset/TestDatasetParser.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.dataset;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- * TestDatasetParser: test class for DataSetParser
- */
-public class TestDatasetParser {
-
-  @Test
-  public void testParseOne() {
-    Set<String> expected = new HashSet<String>(Arrays.asList("mydataset"));
-    Assert.assertEquals(expected, DatasetParser
-        .parseDatasetsFromLine(String.format("%smydataset", DatasetParser.DATASET_PREFIX)));
-  }
-
-  @Test
-  public void testParseMultiple() {
-    Set<String> expected = new HashSet<String>(Arrays.asList("one", "two", "three"));
-    Assert.assertEquals(expected, DatasetParser
-        .parseDatasetsFromLine(String.format("%sone,two,three", DatasetParser.DATASET_PREFIX)));
-  }
-
-  @Test
-  public void testParseUnique() {
-    Set<String> expected = new HashSet<String>(Arrays.asList("one", "two"));
-    Assert.assertEquals(expected, DatasetParser
-        .parseDatasetsFromLine(String.format("%sone,one,two", DatasetParser.DATASET_PREFIX)));
-  }
-
-  @Test
-  public void testParseNone() {
-    Assert.assertTrue(DatasetParser
-        .parseDatasetsFromLine(String.format("%s", DatasetParser.DATASET_PREFIX)).isEmpty());
-  }
-}
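
For anyone wiring a new q-file option into the dispatcher introduced by this patch, a minimal sketch of a custom handler follows. The "timezone" option, the QTestTimezoneHandler class, and the example.q path are hypothetical illustrations and are not part of this commit; only QTestOptionDispatcher, QTestOptionHandler, and QTestUtil come from the patch itself.

package org.apache.hadoop.hive.ql.qoption;

import java.io.File;

import org.apache.hadoop.hive.ql.QTestUtil;

/**
 * Hypothetical handler for lines like "--! qt:timezone:US/Pacific" in a q file.
 * The option prefix passed to register() must match [a-z]+, per the dispatcher's pattern.
 */
public class QTestTimezoneHandler implements QTestOptionHandler {

  private String timezone;

  @Override
  public void processArguments(String arguments) {
    // Called by QTestOptionDispatcher.parse() for every matching "--! qt:timezone:..." line.
    timezone = arguments.trim();
  }

  @Override
  public void beforeTest(QTestUtil qt) throws Exception {
    // Called once per test by QTestOptionDispatcher.beforeTest(); apply the collected option here.
    if (timezone != null) {
      System.setProperty("user.timezone", timezone);
      timezone = null;
    }
  }

  // Example wiring, mirroring how QTestUtil.cliInit() registers the dataset handler:
  //   QTestOptionDispatcher dispatcher = new QTestOptionDispatcher();
  //   dispatcher.register("timezone", new QTestTimezoneHandler());
  //   dispatcher.process(new File("example.q")); // collect options from the q file
  //   dispatcher.beforeTest(qt);                 // qt is the active QTestUtil instance
}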
