Repository: ambari Updated Branches: refs/heads/branch-dev-patch-upgrade 480792817 -> 608f0b55e
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java new file mode 100644 index 0000000..a15e5d4 --- /dev/null +++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserJSONTest.java @@ -0,0 +1,263 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p/> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p/> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.view.hive20.resources.upload; + +import org.apache.ambari.view.hive20.client.ColumnDescription; +import org.apache.ambari.view.hive20.client.ColumnDescriptionShort; +import org.apache.ambari.view.hive20.client.Row; +import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl; +import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser; +import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions; +import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData; +import org.junit.Assert; +import org.junit.Test; + +import java.io.IOException; +import java.io.StringReader; + +public class DataParserJSONTest { + + @Test + public void testParsePreviewJSON() throws Exception { + String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" }," + + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5.4\" }," + + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" }," + + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }," + + "{\"col1\": \"e\", 
\n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"8.4\" }," + + "{\"col1\": \"f\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"9.4\" }," + + "{\"col1\": \"g\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"10.4\" }," + + "{\"col1\": \"h\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"11.4\" }," + + "{\"col1\": \"i\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4\" }," + + "{\"col1\": \"j\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"5\" }," + + "{\"col1\": \"k\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" 
,\"col13\" : \"abcd\" ,\"col14\" : \"6\" }," + + "{\"col1\": \"l\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7\" }," + + "{\"col1\": \"m\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"24.4\" }," + + "{\"col1\": \"n\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"14.4\" }," + + "{\"col1\": \"o\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"34.4\" }," + + "{\"col1\": \"p\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"44.4\" }," + + "{\"col1\": \"q\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"54.4\" }," + + "{\"col1\": \"r\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": 
\"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"64.4\" }" + + "]"; + + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_NUMBER_OF_PREVIEW_ROWS, 7); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions) + ) { + + PreviewData pd = dp.parsePreview(); + Assert.assertNotNull(pd.getPreviewRows()); + Assert.assertNotNull(pd.getHeader()); + Assert.assertEquals(7, pd.getPreviewRows().size()); // header row + preview rows + Assert.assertEquals(14, pd.getHeader().size()); + ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0), + new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1), + new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2), + new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.STRING.toString(), 3), + new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.STRING.toString(), 4), + new ColumnDescriptionImpl("col6", ColumnDescriptionShort.DataTypes.STRING.toString(), 5), + new ColumnDescriptionImpl("col7", ColumnDescriptionShort.DataTypes.STRING.toString(), 6), + new ColumnDescriptionImpl("col8", ColumnDescriptionShort.DataTypes.STRING.toString(), 7), + new ColumnDescriptionImpl("col9", ColumnDescriptionShort.DataTypes.STRING.toString(), 8), + new ColumnDescriptionImpl("col10", ColumnDescriptionShort.DataTypes.STRING.toString(), 9), + new ColumnDescriptionImpl("col11", ColumnDescriptionShort.DataTypes.STRING.toString(), 10), + new ColumnDescriptionImpl("col12", ColumnDescriptionShort.DataTypes.STRING.toString(), 11), + new ColumnDescriptionImpl("col13", 
ColumnDescriptionShort.DataTypes.STRING.toString(), 12), + new ColumnDescriptionImpl("col14", ColumnDescriptionShort.DataTypes.DOUBLE.toString(), 13)}; + + Row row2 = new Row(new Object[]{"a", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "4.4"}); + Row row3 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "5.4"}); + Row row4 = new Row(new Object[]{"c", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "6.4"}); + Row row5 = new Row(new Object[]{"d", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "7.4"}); + Row row6 = new Row(new Object[]{"e", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "8.4"}); + Row row7 = new Row(new Object[]{"f", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "9.4"}); + Row row8 = new Row(new Object[]{"g", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "10.4"}); + + Row[] rows = { row2, row3, row4, row5, row6, row7, row8}; + + Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray()); + Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray()); + } + } + + /** + * additional columns in rows of JSON are ignored. 
+ * + * @throws IOException + */ + @Test + public void testParsePreviewCSVMoreColumns() throws Exception { + String str = "[ {\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" }," + + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" , \"col14\" : \"43.4\" ,\"col15\" : \"asafsfa\" }," + + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" }," + + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }" + + "]"; + + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions) + ) { + + PreviewData pd = dp.parsePreview(); + + Row row2 = new Row(new Object[]{"b", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "abcd", "43.4"}); + Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow()); + } + } + + /** + * less columns in json makes them null. 
+ * + * @throws IOException + */ + @Test + public void testParsePreviewCSVLessColumns() throws Exception { + String str = "[ " + + "{\"col1\" : \"a\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"4.4\" }," + + "{\"col1\": \"b\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" }," + + "{\"col1\": \"c\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"6.4\" }," + + "{\"col1\": \"d\", \n\"col2\": \"abcd\" ,\"col3\": \"abcd\" ,\"col4\": \"abcd\" ,\"col5\": \"abcd\" ,\"col6\": \"abcd\" ,\"col7\": \"abcd\" ,\"col8\": \"abcd\" ,\"col9\": \"abcd\" ,\"col10\": \"abcd\" ,\"col11\": \"abcd\" ,\"col12\" : \"abcd\" ,\"col13\" : \"abcd\" ,\"col14\" : \"7.4\" }" + + "]"; + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions) + ) { + PreviewData pd = dp.parsePreview(); + + Assert.assertNull(pd.getPreviewRows().get(1).getRow()[13]); + } + } + + /** + * illegal json format gives error + * + * @throws IOException + */ + @Test(expected = IllegalArgumentException.class) + public void testWrongJsonFormat() throws Exception { + String str = "[ " + + "{\"col1\" : \"a\", 
\n\"col2\": \"abcd\" }," + + "{\"col1\": \"b\", \n\"col2\": \"abcd\" }," + + "{\"col1\": \"c\", \n\"col2\": \"abcd\" }," + + "{\"col1\": \"d\",, \n\"col2\": \"abcd\" }" // extra comma in this line + + "]"; + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + PreviewData pd = dp.parsePreview(); + } + } + + /** + * One row JSON will give embedde column names and 1st row in preview if HEADER.EMBEDDED is selected + * @throws IOException + */ + @Test + public void testParsePreview1RowJSON() throws Exception { + String str = "[ " + + "{\"col1\": \"d\", \n\"col2\": \"abcd\" }" // extra comma in this line + + "]"; + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + + PreviewData pd = dp.parsePreview(); + Assert.assertNotNull(pd.getPreviewRows()); + Assert.assertNotNull(pd.getHeader()); + Assert.assertEquals(1, pd.getPreviewRows().size()); + Assert.assertEquals(2, pd.getHeader().size()); + ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0), + new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)}; + + Object cols1[] = new Object[2]; + cols1[0] = "d"; + cols1[1] = "abcd"; + Row row1 = new Row(cols1); + + Row[] rows = {row1}; + + Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray()); + Assert.assertArrayEquals("Rows Not Correct.", rows, 
pd.getPreviewRows().toArray()); + } + } + + /** + * One row JSON will give default column names and 1st row in preview if HEADER.PROVIDED_BY_USER is selected + * @throws IOException + */ + @Test + public void testParsePreview1RowJSONHeaderProvided() throws Exception { + String str = "[ " + + "{\"col1\": \"d\", \n\"col2\": \"abcd\" }" // extra comma in this line + + "]"; + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.JSON.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + + PreviewData pd = dp.parsePreview(); + Assert.assertNotNull(pd.getPreviewRows()); + Assert.assertNotNull(pd.getHeader()); + Assert.assertEquals(1, pd.getPreviewRows().size()); + Assert.assertEquals(2, pd.getHeader().size()); + ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.CHAR.toString(), 0), + new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1)}; + + Object cols1[] = new Object[2]; + cols1[0] = "d"; + cols1[1] = "abcd"; + Row row1 = new Row(cols1); + + Row[] rows = {row1}; + + Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray()); + Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray()); + } + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java new file mode 100644 index 0000000..07e7c7c 
--- /dev/null +++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/DataParserXMLTest.java @@ -0,0 +1,295 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p/> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p/> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ambari.view.hive20.resources.upload; + +import org.apache.ambari.view.hive20.client.ColumnDescription; +import org.apache.ambari.view.hive20.client.ColumnDescriptionShort; +import org.apache.ambari.view.hive20.client.Row; +import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl; +import org.apache.ambari.view.hive20.resources.uploads.parsers.DataParser; +import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions; +import org.apache.ambari.view.hive20.resources.uploads.parsers.PreviewData; +import org.junit.Assert; +import org.junit.Test; + +import java.io.IOException; +import java.io.StringReader; + +public class DataParserXMLTest { + + @Test + public void testParsePreviewXML() throws Exception { + String str = "<table>" + + "<row>" + + "<col name=\"col1\">row1-col1-Value</col>" + + "<col name=\"col2\">row1-col2-Value</col>" + + "<col name=\"col3\">row1-col3-Value</col>" + + "<col name=\"col4\">10</col>" + + "<col name=\"col5\">11</col>" + + "</row>" + + 
"<row>" + + "<col name=\"col1\">row2-col1-Value</col>" + + "<col name=\"col2\">row2-col2-Value</col>" + + "<col name=\"col3\">row2-col3-Value</col>" + + "<col name=\"col4\">20</col>" + + "<col name=\"col5\">21</col>" + + "</row>" + + "</table>"; + + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString()); + + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + + PreviewData pd = dp.parsePreview(); + Assert.assertNotNull(pd.getPreviewRows()); + Assert.assertNotNull(pd.getHeader()); + Assert.assertEquals(2, pd.getPreviewRows().size()); // header row + preview rows + Assert.assertEquals(5, pd.getHeader().size()); + ColumnDescription[] cd = {new ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0), + new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.STRING.toString(), 1), + new ColumnDescriptionImpl("col3", ColumnDescriptionShort.DataTypes.STRING.toString(), 2), + new ColumnDescriptionImpl("col4", ColumnDescriptionShort.DataTypes.INT.toString(), 3), + new ColumnDescriptionImpl("col5", ColumnDescriptionShort.DataTypes.INT.toString(), 4) + }; + + Row row2 = new Row(new Object[]{"row1-col1-Value", "row1-col2-Value", "row1-col3-Value", "10", "11"}); + Row row3 = new Row(new Object[]{"row2-col1-Value", "row2-col2-Value", "row2-col3-Value", "20", "21"}); + + Row[] rows = {row2, row3}; + + Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray()); + Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray()); + } + } + + + /** + * additional columns in rows of XML are ignored. + * number of columns are decided by the first row of the table and here second row contains more columns so those are ignored. 
+ * @throws IOException + */ + @Test + public void testParsePreviewCSVMoreColumns() throws Exception { + String str ="<table>" + + "<row>" + + "<col name=\"col1\">row1-col1-Value</col>" + + "<col name=\"col2\">row1-col2-Value</col>" + + "<col name=\"col3\">row1-col3-Value</col>" + + "<col name=\"col4\">10</col>" + + "<col name=\"col5\">11</col>" + + "</row>" + + "<row>" + + "<col name=\"col1\">row2-col1-Value</col>" + + "<col name=\"col2\">row2-col2-Value</col>" + + "<col name=\"col3\">row2-col3-Value</col>" + + "<col name=\"col99\">row2-col99-Value</col>" + // extra colummn + "<col name=\"col100\">row2-col100-Value</col>" + // extra column + "<col name=\"col4\">20</col>" + + "<col name=\"col5\">21</col>" + + "</row>" + + "</table>"; + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString()); + + + try( StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + + + PreviewData pd = dp.parsePreview(); + + Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value","20","21"}); + Assert.assertArrayEquals("More number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow()); + } + } + + /** + * less columns in xml makes them null. + * number of columns are decided by the first row of the table and here second row does not contain col99 and col100 + * columns so those are set to null. 
+ * @throws IOException + */ + @Test + public void testParsePreviewCSVLessColumns() throws Exception { + String str = "<table>" + + "<row>" + + "<col name=\"col1\">row1-col1-Value</col>" + + "<col name=\"col2\">row1-col2-Value</col>" + + "<col name=\"col3\">row1-col3-Value</col>" + + "<col name=\"col99\">row2-col99-Value</col>" + // extra colummn + "<col name=\"col100\">row2-col100-Value</col>" + // extra column + "<col name=\"col4\">10</col>" + + "<col name=\"col5\">11</col>" + + "</row>" + + "<row>" + + "<col name=\"col1\">row2-col1-Value</col>" + + "<col name=\"col2\">row2-col2-Value</col>" + + "<col name=\"col3\">row2-col3-Value</col>" + + "<col name=\"col4\">20</col>" + + "<col name=\"col5\">21</col>" + + "</row>" + + "</table>"; + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + PreviewData pd = dp.parsePreview(); + + Row row2 = new Row(new Object[]{"row2-col1-Value","row2-col2-Value","row2-col3-Value",null,null,"20","21"}); + Assert.assertArrayEquals("Less number of columns do not give correct result.", row2.getRow(), pd.getPreviewRows().get(1).getRow()); + } + } + + /** + * illegal xml format gives error. adding illegal tag gives error + * + * @throws IOException + */ + @Test(expected = IllegalArgumentException.class) + public void testWrongXMLFormat() throws Exception { + String str = "<table>" + + "<row>" + + "<ccc></ccc>" + // illegal tag. 
+ "<col name=\"col1\">row1-col1-Value</col>" + + "<col name=\"col2\">row1-col2-Value</col>" + + "<col name=\"col3\">row1-col3-Value</col>" + + "<col name=\"col99\">row2-col99-Value</col>" + // extra colummn + "<col name=\"col100\">row2-col100-Value</col>" + // extra column + "<col name=\"col4\">10</col>" + + "<col name=\"col5\">11</col>" + + "</row>" + + "<row>" + + "<col name=\"col1\">row2-col1-Value</col>" + + "<col name=\"col2\">row2-col2-Value</col>" + + "<col name=\"col3\">row2-col3-Value</col>" + + "<col name=\"col4\">20</col>" + + "<col name=\"col5\">21</col>" + + "</row>" + + "</table>"; + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.FIRST_RECORD.toString()); + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + PreviewData pd = dp.parsePreview(); + } + } + + /** + * One row XML will give embedde column names and 1st row in preview if HEADER.EMBEDDED is selected + * @throws IOException + */ + @Test + public void testParsePreview1RowXML() throws Exception { + String str = "<table>" + + "<row>" + + "<col name=\"col1\">row1-col1-Value</col>" + + "<col name=\"col2\">11</col>" + + "</row>" + + "</table>"; + + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.EMBEDDED.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions); + ) { + + PreviewData pd = dp.parsePreview(); + Assert.assertNotNull(pd.getPreviewRows()); + Assert.assertNotNull(pd.getHeader()); + Assert.assertEquals(1, pd.getPreviewRows().size()); + Assert.assertEquals(2, pd.getHeader().size()); + ColumnDescription[] cd = {new 
ColumnDescriptionImpl("col1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0), + new ColumnDescriptionImpl("col2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)}; + + Object cols1[] = new Object[2]; + cols1[0] = "row1-col1-Value"; + cols1[1] = "11"; + Row row1 = new Row(cols1); + + Row[] rows = {row1}; + + Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray()); + Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray()); + } + } + + /** + * One row XML will give default column names and 1st row in preview if HEADER.PROVIDED_BY_USER is selected + * @throws IOException + */ + @Test + public void testParsePreview1RowXMLHeaderProvided() throws Exception { + String str = "<table>" + + "<row>" + + "<col name=\"col1\">row1-col1-Value</col>" + + "<col name=\"col2\">11</col>" + + "</row>" + + "</table>"; + + ParseOptions parseOptions = new ParseOptions(); + parseOptions.setOption(ParseOptions.OPTIONS_FILE_TYPE, ParseOptions.InputFileType.XML.toString()); + parseOptions.setOption(ParseOptions.OPTIONS_HEADER, ParseOptions.HEADER.PROVIDED_BY_USER.toString()); + + try( + StringReader sr = new StringReader(str); + DataParser dp = new DataParser(sr, parseOptions) + ) { + + PreviewData pd = dp.parsePreview(); + Assert.assertNotNull(pd.getPreviewRows()); + Assert.assertNotNull(pd.getHeader()); + Assert.assertEquals(1, pd.getPreviewRows().size()); + Assert.assertEquals(2, pd.getHeader().size()); + ColumnDescription[] cd = {new ColumnDescriptionImpl("column1", ColumnDescriptionShort.DataTypes.STRING.toString(), 0), + new ColumnDescriptionImpl("column2", ColumnDescriptionShort.DataTypes.INT.toString(), 1)}; + + Object cols1[] = new Object[2]; + cols1[0] = "row1-col1-Value"; + cols1[1] = "11"; + Row row1 = new Row(cols1); + + Row[] rows = {row1}; + + Assert.assertArrayEquals("Header Not Correct.", cd, pd.getHeader().toArray()); + Assert.assertArrayEquals("Rows Not Correct.", rows, pd.getPreviewRows().toArray()); + } + } +} 
http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java new file mode 100644 index 0000000..f775ea5 --- /dev/null +++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/JsonParserTest.java @@ -0,0 +1,146 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p/> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p/> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.ambari.view.hive20.resources.upload;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.resources.uploads.parsers.json.JSONParser;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;

/**
 * Unit tests for {@link JSONParser}: verifies row iteration over a JSON array
 * of objects, including the empty-stream, empty-object and empty-array edge cases.
 */
public class JsonParserTest {

  /** An empty stream is not parseable JSON, so the parser must fail to construct. */
  @Test(expected = IOException.class)
  public void testEmptyStream() throws Exception {
    try (StringReader reader = new StringReader("");
         JSONParser parser = new JSONParser(reader, null)) {
      // PARSING WILL THROW ERROR
    }
  }

  /** A single empty object yields exactly one row with zero columns. */
  @Test
  public void testEmptyRow() throws Exception {
    JsonArray array = new JsonArray();
    array.add(new JsonObject());

    try (StringReader reader = new StringReader(array.toString());
         JSONParser parser = new JSONParser(reader, null)) {
      Iterator<Row> rows = parser.iterator();

      Assert.assertTrue("Iterator should not be Empty", rows.hasNext());
      Assert.assertArrayEquals("Row should be empty", new Object[]{}, rows.next().getRow());
    }
  }

  /** An empty JSON array parses to zero rows. */
  @Test
  public void testEmptyTable() throws Exception {
    String json = new JsonArray().toString();

    try (StringReader reader = new StringReader(json);
         JSONParser parser = new JSONParser(reader, null)) {
      Assert.assertFalse("Iterator Empty!", parser.iterator().hasNext());
    }
  }

  /** Scalar values of mixed JSON types all surface as their string form. */
  @Test
  public void testParse1Row() throws Exception {
    JsonObject object = new JsonObject();
    object.addProperty("key1", "value1");
    object.addProperty("key2", 'c');
    object.addProperty("key3", 10);
    object.addProperty("key4", 10.1);

    JsonArray array = new JsonArray();
    array.add(object);

    try (StringReader reader = new StringReader(array.toString());
         JSONParser parser = new JSONParser(reader, null)) {
      Iterator<Row> rows = parser.iterator();

      Assert.assertTrue("Iterator Empty!", rows.hasNext());
      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
      Assert.assertEquals("Row not equal!", expected, rows.next());

      Assert.assertFalse("Should report no more rows!", rows.hasNext());
    }
  }

  /** Multiple objects iterate in order; the iterator stays exhausted afterwards. */
  @Test
  public void testParseMultipleRow() throws Exception {
    JsonObject first = new JsonObject();
    first.addProperty("key1", "value1");
    first.addProperty("key2", "c");
    first.addProperty("key3", "10");
    first.addProperty("key4", "10.1");

    JsonObject second = new JsonObject();
    second.addProperty("key1", "value2");
    second.addProperty("key2", "c2");
    second.addProperty("key3", "102");
    second.addProperty("key4", true);

    JsonArray array = new JsonArray();
    array.add(first);
    array.add(second);

    try (StringReader reader = new StringReader(array.toString());
         JSONParser parser = new JSONParser(reader, null)) {
      Iterator<Row> rows = parser.iterator();

      Assert.assertTrue("Failed to detect 1st row!", rows.hasNext());
      Assert.assertEquals("Failed to match 1st row!",
          new Row(new Object[]{"value1", "c", "10", "10.1"}), rows.next());

      Assert.assertTrue("Failed to detect 2nd row!", rows.hasNext());
      Assert.assertEquals("Failed to match 2nd row!",
          new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), rows.next());

      Assert.assertFalse("Failed to detect end of rows!", rows.hasNext());
      Assert.assertFalse("Failed to detect end of rows 2nd time!", rows.hasNext());
    }
  }
}
b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVParserTest.java new file mode 100644 index 0000000..a55bf66 --- /dev/null +++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/OpenCSVParserTest.java @@ -0,0 +1,313 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p/> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p/> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.ambari.view.hive20.resources.upload;

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseOptions;
import org.apache.ambari.view.hive20.resources.uploads.parsers.csv.opencsv.OpenCSVParser;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;

/**
 * Unit tests for {@link OpenCSVParser}: row iteration, quoting, escaping and
 * custom delimiter/quote/escape characters configured via {@link ParseOptions}.
 */
public class OpenCSVParserTest {

  /**
   * no exception in creating csvParser with emtpy stream
   * @throws IOException
   */
  @Test
  public void testEmptyStream() throws Exception {
    String csv = "";

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
    ) {
      Assert.assertEquals("There should not be any rows.",false, jp.iterator().hasNext());
    }
  }

  /**
   * in case of csv an empty line is still considered as row
   * @throws IOException
   */
  @Test
  public void testEmptyRow() throws Exception {
    String csv = " ";

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
    ) {
      Iterator<Row> iterator = jp.iterator();

      // FIX: the message used to say "Iterator should be Empty" which contradicted
      // the asserted condition (a blank line IS a row).
      Assert.assertEquals("Iterator should not be empty", true, iterator.hasNext());
      Assert.assertArrayEquals("Row should not be empty",new Object[]{" "},iterator.next().getRow());
    }
  }

  @Test
  public void testParse1Row() throws Exception {
    String csv = "value1,c,10,10.1";

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
    ) {
      Iterator<Row> iterator = jp.iterator();

      Assert.assertEquals("Iterator Empty!", true, iterator.hasNext());
      Row row = iterator.next();
      Row expected = new Row(new Object[]{"value1", "c", "10", "10.1"});
      Assert.assertEquals("Row not equal!", expected, row);

      Assert.assertEquals("Should report no more rows!", false, iterator.hasNext());
    }
  }

  @Test
  public void testParseMultipleRow() throws Exception {

    String csv = "value1,c,10,10.1\n" +
      "value2,c2,102,true";

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, new ParseOptions());
    ) {

      Iterator<Row> iterator = jp.iterator();

      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", new Row(new Object[]{"value1", "c", "10", "10.1"}), iterator.next());

      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 2nd row!", new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), iterator.next());

      Assert.assertEquals("Failed to detect end of rows!", false, iterator.hasNext());
      Assert.assertEquals("Failed to detect end of rows 2nd time!", false, iterator.hasNext());
    }
  }

  /** Quoted embedded newlines and escaped newlines both stay inside one field. */
  @Test
  public void testQuotedAndEscapedEndline() throws Exception {

    String csv = "\"row1-\ncol1\",1,1.1\n\"row2-\\\ncol1\",2,2.2\n";
    ParseOptions po = new ParseOptions();

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();

      Row row = new Row(new Object[]{"row1-\ncol1", "1", "1.1"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());

      Row row2 = new Row(new Object[]{"row2-\ncol1", "2", "2.2"});
      // FIX: messages used to be copy-pasted "1st row".
      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 2nd row!", row2, iterator.next());

    }
  }

  /** RFC-4180 style doubled quote inside a quoted field. */
  @Test
  public void testQuotedDoubleQuote() throws Exception {

    String csv = "\"aaa\",\"b\"\"bb\",\"ccc\"";
    ParseOptions po = new ParseOptions();

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();

      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }

  /** Backslash-escaped quote inside a quoted field. */
  @Test
  public void testEscapedDoubleQuote() throws Exception {

    String csv = "\"aaa\",\"b\\\"bb\",\"ccc\"";
    ParseOptions po = new ParseOptions();

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();

      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }


  /** A custom escape char ('$') protects the quote character. */
  @Test
  public void testSpecialEscape() throws Exception {

    String csv = "\"aaa\",\"b$\"bb\",\"ccc\"";
    ParseOptions po = new ParseOptions();
    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();

      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }

  /** A doubled custom escape char collapses to a single literal. */
  @Test
  public void testSpecialEscapedEscape() throws Exception {

    String csv = "aaa,b$$bb,ccc";
    ParseOptions po = new ParseOptions();
    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();

      Row row = new Row(new Object[]{"aaa", "b$bb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }


  /** A lone escape char before an ordinary character is dropped by opencsv. */
  @Test
  public void testSpecialUnEscapedEscape() throws Exception {

    String csv = "aaa,b$bb,ccc";
    ParseOptions po = new ParseOptions();
    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'$');

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();

      Row row = new Row(new Object[]{"aaa", "bbb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }

  /** A non-printing escape char (\001, Hive's default delimiter family) also works. */
  @Test
  public void test001Escape() throws Exception {

    String csv = "aaa,b\001\"bb,ccc";
    ParseOptions po = new ParseOptions();
    po.setOption(ParseOptions.OPTIONS_CSV_ESCAPE_CHAR,'\001');

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();
      Row row = new Row(new Object[]{"aaa", "b\"bb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }

  /** A non-printing quote char; a doubled quote inside yields one literal \001. */
  @Test
  public void testSpecialQuote() throws Exception {

    String csv = "\001aaa\001,\001b\001\001bb\001,\001ccc\001";
    ParseOptions po = new ParseOptions();
    po.setOption(ParseOptions.OPTIONS_CSV_QUOTE,'\001');

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();
      Row row = new Row(new Object[]{"aaa", "b\001bb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }

  /** Space as delimiter combined with standard double quoting. */
  @Test
  public void testSpaceAsDelimiterAndQuoted() throws Exception {

    String csv = "aaa \"b bb\" ccc\naaa2 bbb2 \"c cc2\"";
    ParseOptions po = new ParseOptions();
    po.setOption(ParseOptions.OPTIONS_CSV_DELIMITER,' ');

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();
      Row row = new Row(new Object[]{"aaa", "b bb", "ccc"});
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());

      Row row2 = new Row(new Object[]{"aaa2", "bbb2", "c cc2"});
      // FIX: messages used to be copy-pasted "1st row".
      Assert.assertEquals("Failed to detect 2nd row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 2nd row!", row2, iterator.next());
    }
  }

  @Test
  public void testFailedDelimiterEscaped() throws Exception {

    String csv = "aaa,b\\,bb,ccc";
    ParseOptions po = new ParseOptions();

    try(
      StringReader sr = new StringReader(csv);
      OpenCSVParser jp = new OpenCSVParser(sr, po);
    ) {

      Iterator<Row> iterator = jp.iterator();
      Row row = new Row(new Object[]{"aaa", "b","bb", "ccc"}); // different from Common CSVParser
      Assert.assertEquals("Failed to detect 1st row!", true, iterator.hasNext());
      Assert.assertEquals("Failed to match 1st row!", row, iterator.next());
    }
  }
}
package org.apache.ambari.view.hive20.resources.upload;

import com.opencsv.CSVParser;
import com.opencsv.CSVReader;
import com.opencsv.CSVWriter;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;

/**
 * Characterization tests for the opencsv library itself ({@link CSVParser},
 * {@link CSVReader}, {@link CSVWriter}): documents the parsing/writing behavior
 * the hive20 upload code relies on (custom separators, quotes, escapes, line ends).
 */
public class OpenCSVTest {

  /**
   * no exception in creating csvParser with emtpy stream
   *
   * @throws IOException
   */
  @Test
  public void testEmptyStream() throws Exception {
    String csv = "";

    CSVParser jp = new CSVParser();
    String[] columns = jp.parseLine(csv);
    Assert.assertEquals("Should detect one column.", 1, columns.length);
    // FIX: was Assert.assertEquals on arrays (deprecated overload); arrays need
    // element-wise comparison via assertArrayEquals.
    Assert.assertArrayEquals("Should detect one column with empty value.", new String[]{""}, columns);
  }

  /**
   * in case of csv an empty line is still considered as row
   *
   * @throws IOException
   */
  @Test
  public void testEmptyRow() throws Exception {
    String csv = " ";
    CSVParser jp = new CSVParser();

    String[] columns = jp.parseLine(csv);
    Assert.assertEquals("One column not detected.", 1, columns.length);
    Assert.assertArrayEquals("Row should not be empty", new String[]{" "}, columns);
  }

  @Test
  public void testParse1Row() throws Exception {
    String csv = "value1,c,10,10.1";

    String[] cols = csv.split(",");
    CSVParser jp = new CSVParser();
    String[] columns = jp.parseLine(csv);
    Assert.assertEquals("4 columns not detect", 4, columns.length);
    Assert.assertArrayEquals("Row not equal!", cols, columns);
  }

  @Test
  public void testParseMultipleRow() throws Exception {

    String csv = "value1,c,10,10.1\n" +
      "value2,c2,102,true";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,',','"','\\');
    ) {
      String[] row1 = csvReader.readNext();
      String[] row2 = csvReader.readNext();

      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"value1", "c", "10", "10.1"}, row1);

      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
    }
  }

  @Test
  public void testParseCustomSeparator() throws Exception {

    String csv = "value1#c#10#10.1\n" +
      "value2#c2#102#true";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,'#','"','\\');
    ) {
      String[] row1 = csvReader.readNext();
      String[] row2 = csvReader.readNext();

      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"value1", "c", "10", "10.1"}, row1);

      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
    }
  }


  /** A quoted field may contain the custom separator. */
  @Test
  public void testParseCustomSeparatorAndQuote() throws Exception {

    String csv = "\"valu#e1\"#c#10#10.1\n" +
      "value2#c2#102#true";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,'#','"','\\');
    ) {
      String[] row1 = csvReader.readNext();
      String[] row2 = csvReader.readNext();

      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"valu#e1", "c", "10", "10.1"}, row1);

      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);
    }
  }

  /** Custom quote char; readNext() past the last row returns null. */
  @Test
  public void testParseCustomSeparatorAndCustomQuote() throws Exception {

    String csv = "\'valu#e1\'#c#10#10.1\n" +
      "value2#c2#102#true";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
    ) {
      String[] row1 = csvReader.readNext();
      String[] row2 = csvReader.readNext();
      String[] row3 = csvReader.readNext();

      Assert.assertArrayEquals("Failed to match 1st row!",new String[]{"valu#e1", "c", "10", "10.1"}, row1);

      Assert.assertArrayEquals("Failed to match 2nd row!",new String[]{"value2", "c2", "102", "true"}, row2);

      Assert.assertArrayEquals("should match Null", null, row3);
    }
  }

  /** Default CSVWriter quotes every field with double quotes. */
  @Test
  public void testWriter() throws Exception {

    String csv = "\'valu#e1\'#c#10#10.1\n" +
      "value2#c2#102#true";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
      StringWriter sw = new StringWriter();
      CSVWriter csvWriter = new CSVWriter(sw);
    ) {
      String[] row1 = csvReader.readNext();
      csvWriter.writeNext(row1);
      String[] row2 = csvReader.readNext();
      csvWriter.writeNext(row2);

      Assert.assertEquals("CSVWriter failed.","\"valu#e1\",\"c\",\"10\",\"10.1\"\n" +
        "\"value2\",\"c2\",\"102\",\"true\"\n", sw.getBuffer().toString());
    }
  }

  @Test
  public void testWriterCustomSeparator() throws Exception {

    String csv = "\'valu#e1\'#c#10#10.1\n" +
      "value2#c2#102#true";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,'#','\'','\\');
      StringWriter sw = new StringWriter();
      CSVWriter csvWriter = new CSVWriter(sw,'$');
    ) {
      String[] row1 = csvReader.readNext();
      csvWriter.writeNext(row1);
      String[] row2 = csvReader.readNext();
      csvWriter.writeNext(row2);

      Assert.assertEquals("CSVWriter failed.","\"valu#e1\"$\"c\"$\"10\"$\"10.1\"\n" +
        "\"value2\"$\"c2\"$\"102\"$\"true\"\n", sw.getBuffer().toString());
    }
  }

  /** Non-printing separator (\002) and line end (\003), quoting disabled per call. */
  @Test
  public void testWriterCustomSeparatorAndEnline() throws Exception {

    String csv = "value1,c,10,10.1\n" +
      "value2,c2,102,true";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,',','\'','\\');
      StringWriter sw = new StringWriter();
      CSVWriter csvWriter = new CSVWriter(sw,'\002',',',"\003");
    ) {
      String[] row1 = csvReader.readNext();
      csvWriter.writeNext(row1,false);
      String[] row2 = csvReader.readNext();
      csvWriter.writeNext(row2,false);

      Assert.assertEquals("CSVWriter failed.","value1\002c\00210\00210.1\003" +
        "value2\002c2\002102\002true\003", sw.getBuffer().toString());
    }
  }

  /** Writer quotes a field only when it contains the separator character. */
  @Test
  public void testWriterQuote() throws Exception {

    String csv = "val#ue1,c,10,10.1\n" +
      "'val,ue2',c2,102,true\n" +
      "val\002ue3,c\0033,103,false";

    try(
      StringReader sr = new StringReader(csv);
      CSVReader csvReader = new CSVReader(sr,',','\'','\\');
      StringWriter sw = new StringWriter();
      CSVWriter csvWriter = new CSVWriter(sw,'\002','\'',"\003");
    ) {
      String[] row1 = csvReader.readNext();
      csvWriter.writeNext(row1,false);
      String[] row2 = csvReader.readNext();
      csvWriter.writeNext(row2,false);
      String[] row3 = csvReader.readNext();
      csvWriter.writeNext(row3,false);

      Assert.assertEquals("CSVWriter failed.","val#ue1\u0002c\u000210\u000210.1\u0003" +
        "val,ue2\u0002c2\u0002102\u0002true\u0003" +
        "'val\u0002ue3'\u0002c\u00033\u0002103\u0002false\u0003", sw.getBuffer().toString());
    }
  }
}
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p/> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p/> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ambari.view.hive20.resources.upload; + +import org.apache.ambari.view.hive20.resources.uploads.parsers.ParseUtils; +import org.junit.Assert; +import org.junit.Test; + +public class ParseUtilsTest { + @Test + public void testDateFormats() { + Assert.assertTrue(ParseUtils.isDate("1970-01-01")); + Assert.assertTrue(ParseUtils.isDate("1970-01-01 ")); + Assert.assertTrue(ParseUtils.isDate("0001-1-3")); + Assert.assertTrue(ParseUtils.isDate("1996-1-03")); + Assert.assertTrue(ParseUtils.isDate("1996-01-3")); + Assert.assertTrue(ParseUtils.isDate("1996-10-3")); + Assert.assertFalse(ParseUtils.isDate("1970-01-01 01:01:01")); + Assert.assertFalse(ParseUtils.isDate("1970-01-01 23:59:59.999999")); + Assert.assertFalse(ParseUtils.isDate("1970/01/01")); + Assert.assertFalse(ParseUtils.isDate("01-01-1970")); + Assert.assertFalse(ParseUtils.isDate("1970-13-01")); + Assert.assertFalse(ParseUtils.isDate("1970-01-32")); + Assert.assertFalse(ParseUtils.isDate("01/01/1970")); + Assert.assertFalse(ParseUtils.isDate("001-1-3")); + } + + @Test + public void testTimestampFormats() { + Assert.assertFalse(ParseUtils.isTimeStamp("1999-11-30")); + Assert.assertFalse(ParseUtils.isTimeStamp("1999-12-31 23:59")); + Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 
23:59:59")); + Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.100")); + Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.999999")); + Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.99999999")); + Assert.assertTrue(ParseUtils.isTimeStamp("1999-12-31 23:59:59.999999999")); + Assert.assertTrue(ParseUtils.isTimeStamp("1999-10-31 23:59:59.999999999")); + Assert.assertFalse(ParseUtils.isTimeStamp("1999-12-31 23:59:59.9999999999")); + Assert.assertFalse(ParseUtils.isTimeStamp("1999/12/31 23:59:59.9999999999")); + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java ---------------------------------------------------------------------- diff --git a/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java new file mode 100644 index 0000000..1a0d34a --- /dev/null +++ b/contrib/views/hive20/src/test/java/org/apache/ambari/view/hive20/resources/upload/QueryGeneratorTest.java @@ -0,0 +1,108 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * <p/> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p/> + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
package org.apache.ambari.view.hive20.resources.upload;

// FIX: RowFormat was imported from the old "hive" view package while every other
// type here comes from "hive20"; corrected to the hive20 package.
// NOTE(review): confirm RowFormat exists under hive20 in this branch.
import org.apache.ambari.view.hive20.resources.uploads.query.RowFormat;
import org.apache.ambari.view.hive20.client.ColumnDescription;
import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive20.resources.uploads.HiveFileType;
import org.apache.ambari.view.hive20.resources.uploads.query.DeleteQueryInput;
import org.apache.ambari.view.hive20.resources.uploads.query.InsertFromQueryInput;
import org.apache.ambari.view.hive20.resources.uploads.query.QueryGenerator;
import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo;
import org.junit.Assert;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

/**
 * Unit tests for {@link QueryGenerator}: CREATE TABLE (text/ORC), INSERT-SELECT
 * (with and without UNHEX of string-typed columns) and DROP TABLE statements.
 */
public class QueryGeneratorTest {

  /** Builds the five-column description list shared by most tests here. */
  private List<ColumnDescriptionImpl> makeColumns() {
    List<ColumnDescriptionImpl> cdl = new ArrayList<>(4);
    cdl.add(new ColumnDescriptionImpl("col1", ColumnDescription.DataTypes.CHAR.toString(), 0, 10));
    cdl.add(new ColumnDescriptionImpl("col2", ColumnDescription.DataTypes.STRING.toString(), 1));
    cdl.add(new ColumnDescriptionImpl("col3", ColumnDescription.DataTypes.DECIMAL.toString(), 2, 10, 5));
    cdl.add(new ColumnDescriptionImpl("col4", ColumnDescription.DataTypes.VARCHAR.toString(), 3, 40));
    cdl.add(new ColumnDescriptionImpl("col5", ColumnDescription.DataTypes.INT.toString(), 4));
    return cdl;
  }

  /** TEXTFILE tables get an explicit ROW FORMAT clause with delimiter/escape. */
  @Test
  public void testCreateTextFile() {
    List<ColumnDescriptionImpl> cdl = makeColumns();

    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.TEXTFILE, new RowFormat(',', '\\'));

    QueryGenerator qg = new QueryGenerator();
    Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING," +
      " col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" +
      " ESCAPED BY '\\\\' STORED AS TEXTFILE;",qg.generateCreateQuery(ti));
  }

  /** ORC tables need no ROW FORMAT clause. */
  @Test
  public void testCreateORC() {
    List<ColumnDescriptionImpl> cdl = makeColumns();

    TableInfo ti = new TableInfo("databaseName", "tableName", cdl, HiveFileType.ORC, new RowFormat(',', '\\'));

    QueryGenerator qg = new QueryGenerator();
    Assert.assertEquals("Create query for text file not correct ","CREATE TABLE tableName (col1 CHAR(10), col2 STRING, col3 DECIMAL(10,5), col4 VARCHAR(40), col5 INT) STORED AS ORC;",qg.generateCreateQuery(ti));
  }

  /** unhexInsert=false: columns are selected as-is. */
  @Test
  public void testInsertWithoutUnhexFromQuery() {
    List<ColumnDescriptionImpl> cdl = makeColumns();

    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.FALSE);

    QueryGenerator qg = new QueryGenerator();
    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT col1, col2, col3, col4, col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
  }

  /** unhexInsert=true: only string-family columns (CHAR/STRING/VARCHAR) are UNHEXed. */
  @Test
  public void testInsertWithUnhexFromQuery() {
    List<ColumnDescriptionImpl> cdl = makeColumns();

    InsertFromQueryInput ifqi = new InsertFromQueryInput("fromDB","fromTable","toDB","toTable", cdl, Boolean.TRUE);

    QueryGenerator qg = new QueryGenerator();
    Assert.assertEquals("insert from one table to another not correct ","INSERT INTO TABLE toDB.toTable SELECT UNHEX(col1), UNHEX(col2), col3, UNHEX(col4), col5 FROM fromDB.fromTable;",qg.generateInsertFromQuery(ifqi));
  }

  @Test
  public void testDropTableQuery() {

    DeleteQueryInput deleteQueryInput = new DeleteQueryInput("dbName","tableName");

    QueryGenerator qg = new QueryGenerator();
    Assert.assertEquals("drop table query not correct ","DROP TABLE dbName.tableName;",qg.generateDropTableQuery(deleteQueryInput ));
  }
}
package org.apache.ambari.view.hive20.resources.upload;

import org.apache.ambari.view.hive20.client.ColumnDescription;
import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.resources.uploads.ColumnDescriptionImpl;
import org.apache.ambari.view.hive20.resources.uploads.TableDataReader;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

/**
 * Tests for {@link TableDataReader}: verifies that rows produced by an
 * iterator are streamed out as CSV characters, including the empty-input
 * case.
 */
public class TableDataReaderTest {

  /**
   * Test fixture: a read-only iterator producing {@code numberOfRows} rows
   * of {@code numberOfCols} string cells holding consecutive integers
   * ("0", "1", "2", ...) in row-major order.
   */
  private static class RowIter implements Iterator<Row> {
    private final int numberOfRows;
    private final int numberOfCols;
    private int index = 0;
    private final List<Row> rows = new ArrayList<>();

    public RowIter(int numberOfRows, int numberOfCols) {
      this.numberOfRows = numberOfRows;
      this.numberOfCols = numberOfCols;
      int x = 0;
      for (int i = 0; i < this.numberOfRows; i++) {
        // Size the cell array to the requested column count; the previous
        // hard-coded new Object[10] only worked for 10-column fixtures.
        Object[] objArray = new Object[this.numberOfCols];
        for (int j = 0; j < this.numberOfCols; j++) {
          objArray[j] = x++ + "";
        }
        rows.add(new Row(objArray));
      }
    }

    @Override
    public boolean hasNext() {
      return index < numberOfRows;
    }

    @Override
    public Row next() {
      return rows.get(index++);
    }

    @Override
    public void remove() {
      // Read-only fixture: removal is deliberately unsupported.
      throw new UnsupportedOperationException("Operation not supported.");
    }

    @Override
    public String toString() {
      return "RowIter{" +
          "index=" + index +
          ", rows=" + rows +
          '}';
    }
  }

  /**
   * Streams a 10x10 grid through {@link TableDataReader} and checks the
   * first, second and final chunks of the produced CSV character stream.
   */
  @Test
  public void testCSVReader() throws IOException {
    RowIter rowIter = new RowIter(10, 10);
    List<ColumnDescriptionImpl> colDescs = new LinkedList<>();
    for (int i = 0; i < 10; i++) {
      ColumnDescriptionImpl cd =
          new ColumnDescriptionImpl("col" + (i + 1), ColumnDescription.DataTypes.STRING.toString(), i);
      colDescs.add(cd);
    }

    TableDataReader tableDataReader = new TableDataReader(rowIter, colDescs, false);

    char del = TableDataReader.CSV_DELIMITER;
    char[] first10 = {'0', del, '1', del, '2', del, '3', del, '4', del};
    char[] buf = new char[10];
    tableDataReader.read(buf, 0, 10);

    Assert.assertArrayEquals(first10, buf);

    char[] next11 = {'5', del, '6', del, '7', del, '8', del, '9', '\n', '1'};
    char[] buf1 = new char[11];
    tableDataReader.read(buf1, 0, 11);

    Assert.assertArrayEquals(next11, buf1);

    // Drain the reader; buf keeps the contents of the last successful read
    // once read() returns -1 (EOF).
    while (tableDataReader.read(buf, 0, 10) != -1) {
      // intentionally empty: only the final buffer contents matter
    }

    // "97,98,99\n" plus one delimiter left over from the previous read.
    char[] last10 = {'9', '7', del, '9', '8', del, '9', '9', '\n', del};

    Assert.assertArrayEquals(last10, buf);
  }

  /**
   * An empty row iterator must produce no characters: the destination
   * buffer stays untouched (Java char arrays default to '\0').
   */
  @Test
  public void testEmptyCSVReader() throws IOException {
    RowIter rowIter = new RowIter(0, 0);

    TableDataReader tableDataReader = new TableDataReader(rowIter, null, false);

    char[] first10 = new char[10];
    char[] buf = new char[10];

    tableDataReader.read(buf, 0, 10);

    Assert.assertArrayEquals(first10, buf);
  }
}
package org.apache.ambari.view.hive20.resources.upload;

import org.apache.ambari.view.hive20.client.Row;
import org.apache.ambari.view.hive20.resources.uploads.parsers.xml.XMLParser;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;

/**
 * Tests for {@link XMLParser}: covers an empty stream, an empty table, an
 * empty row, and tables with one or several populated rows.
 */
public class XMLParserTest {

  @Test(expected = IOException.class)
  public void testEmptyStream() throws Exception {
    // Building the parser over a zero-length stream must fail.
    try (StringReader reader = new StringReader("");
         XMLParser parser = new XMLParser(reader, null)) {
      // no-op: the XMLParser constructor is expected to throw.
    }
  }

  @Test
  public void testEmptyRow() throws Exception {
    try (StringReader reader = new StringReader("<table><row></row></table>");
         XMLParser parser = new XMLParser(reader, null)) {
      Iterator<Row> rows = parser.iterator();

      Assert.assertTrue("Iterator should not be Empty", rows.hasNext());
      Assert.assertArrayEquals("Row should be empty", new Object[]{}, rows.next().getRow());
    }
  }

  @Test
  public void testEmptyTable() throws Exception {
    try (StringReader reader = new StringReader("<table></table>");
         XMLParser parser = new XMLParser(reader, null)) {
      Assert.assertFalse("Iterator Empty!", parser.iterator().hasNext());
    }
  }

  @Test
  public void testParse1Row() throws Exception {
    String xml = "<table>"
        + "<row>"
        + "<col name=\"key1\">value1</col>"
        + "<col name=\"key2\">c</col>"
        + "<col name=\"key3\">10</col>"
        + "<col name=\"key4\">10.1</col>"
        + "</row>"
        + "</table>";

    try (StringReader reader = new StringReader(xml);
         XMLParser parser = new XMLParser(reader, null)) {
      Iterator<Row> rows = parser.iterator();

      Assert.assertTrue("Iterator Empty!", rows.hasNext());
      Assert.assertEquals("Row not equal!",
          new Row(new Object[]{"value1", "c", "10", "10.1"}), rows.next());
      Assert.assertFalse("Should report no more rows!", rows.hasNext());
    }
  }

  @Test
  public void testParseMultipleRow() throws Exception {
    String xml = "<table>"
        + "<row>"
        + "<col name=\"key1\">value1</col>"
        + "<col name=\"key2\">c</col>"
        + "<col name=\"key3\">10</col>"
        + "<col name=\"key4\">10.1</col>"
        + "</row>"
        + "<row>"
        + "<col name=\"key1\">value2</col>"
        + "<col name=\"key2\">c2</col>"
        + "<col name=\"key3\">102</col>"
        + "<col name=\"key4\">true</col>"
        + "</row>"
        + "</table>";

    try (StringReader reader = new StringReader(xml);
         XMLParser parser = new XMLParser(reader, null)) {
      Iterator<Row> rows = parser.iterator();

      Assert.assertTrue("Failed to detect 1st row!", rows.hasNext());
      Assert.assertEquals("Failed to match 1st row!",
          new Row(new Object[]{"value1", "c", "10", "10.1"}), rows.next());

      Assert.assertTrue("Failed to detect 2nd row!", rows.hasNext());
      Assert.assertEquals("Failed to match 2nd row!",
          new Row(new Object[]{"value2", "c2", "102", Boolean.TRUE.toString()}), rows.next());

      Assert.assertFalse("Failed to detect end of rows!", rows.hasNext());
      Assert.assertFalse("Failed to detect end of rows 2nd time!", rows.hasNext());
    }
  }
}
"https://schema.getpostman.com/json/collection/v2.0.0/collection.json" + }, + "item": [ + { + "name": "fetch table", + "request": { + "auth": { + "type": "basic", + "basic": { + "username": "admin", + "password": "admin", + "saveHelperData": true, + "showPassword": false + } + }, + "url": "http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/1.5.0/instances/AUTO_HIVE_INSTANCE/resources/ddl/databases/d1/tables/t2/info?_=1481634018195", + "method": "GET", + "header": [ + { + "key": "X-Requested-By", + "value": "ambari", + "description": "" + }, + { + "key": "Authorization", + "value": "Basic YWRtaW46YWRtaW4=", + "description": "" + } + ], + "body": {}, + "description": "fetch d1.t2 table" + }, + "response": [] + }, + { + "name": "create new table table", + "request": { + "auth": { + "type": "basic", + "basic": { + "username": "admin", + "password": "admin", + "saveHelperData": true, + "showPassword": false + } + }, + "url": "http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/1.5.0/instances/AUTO_HIVE_INSTANCE/resources/ddl/databases/d1/tables", + "method": "POST", + "header": [ + { + "key": "X-Requested-By", + "value": "ambari", + "description": "" + }, + { + "key": "Authorization", + "value": "Basic YWRtaW46YWRtaW4=", + "description": "" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"database\": \"d1\",\n \"table\": \"t7\",\n \"columns\": [\n {\n \"name\": \"col_name1\",\n \"type\": \"string\",\n \"comment\": \"col_name1 comment\"\n },\n {\n \"name\": \"col_name2\",\n \"type\": \"decimal(10,2)\",\n \"comment\": \"col_name2 comment\"\n }\n ],\n \"ddl\": \"CREATE TABLE `t2`(\\n `col_name1` string COMMENT 'col_name1 comment', \\n `col_name2` decimal(10,2) COMMENT 'col_name2 comment')\\nCOMMENT 'table t1 comment'\\nPARTITIONED BY ( \\n `col_name3` string COMMENT 'col_name3 comment', \\n `col_name4` char(1) COMMENT 'col_name4 comment')\\nCLUSTERED BY ( \\n col_name1, \\n col_name2) \\nSORTED BY ( \\n col_name1 ASC, \\n col_name2 DESC) \\nINTO 
5 BUCKETS\\nROW FORMAT DELIMITED \\n FIELDS TERMINATED BY ',' \\nWITH SERDEPROPERTIES ( \\n 'escape.delim'='\\\\\\\\') \\nSTORED AS INPUTFORMAT \\n 'org.apache.hadoop.mapred.SequenceFileInputFormat' \\nOUTPUTFORMAT \\n 'or g.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'\\nLOCATION\\n 'hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1'\\nTBLPROPERTIES (\\n 'NO_AUTO_COMPACTION'='true', \\n 'immutable'='false', \\n 'orc.compress'='SNAPPY', \\n 'transient_lastDdlTime'='1481520077')\\n\",\n \"partitionInfo\": {\n \"columns\": [\n {\n \"name\": \"col_name4\",\n \"type\": \"char(1)\",\n \"comment\": \"col_name4 comment\"\n },\n {\n \"name\": \"col_name3\",\n \"type\": \"string\",\n \"comment\": \"col_name3 comment\"\n }\n ]\n },\n \"detailedInfo\": {\n \"dbName\": \"d1\",\n \"owner\": \"admin\",\n \"createTime\": \"Mon Dec 12 05:21:17 UTC 2016\",\n \"lastAccessTime\": \"UNKNOWN\",\n \"retention\": \"0\",\n \"tableType\": \"MANAGED_TABLE\",\n \"location\": \"hdfs://c6401.ambari.apache.org:8020/user/hive/tables/d1/t1\",\n \"parameters\": {\n \"immutable\": \"false\",\n \"orc.compress\": \"SNAPPY\",\n \"transient_lastDdlTime\": \"1481520077\",\n \"NO_AUTO_COMPACTION\": \"true\",\n \"comment\": \"table t1 comment\",\n \"SORTBUCKETCOLSPREFIX\": \"TRUE\"\n }\n },\n \"storageInfo\": {\n \"serdeLibrary\": \"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe\",\n \"inputFormat\": \"org.apache.hadoop.mapred.SequenceFileInputFormat\",\n \"outputFormat\": \"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat\",\n \"compressed\": \"No\",\n \"numBuckets\": \"5\",\n \"bucketCols\": [\n \"col_name1\",\n \" col_name2\"\n ],\n \"sortCols\": [\n {\n \"columnName\": \"col_name1\",\n \"order\": \"ASC\"\n },\n {\n \"columnName\": \"col_name2\",\n \"order\": \"DESC\"\n }\n ],\n \"parameters\": {\n \"escape.delim\": \"\\\\\\\\\",\n \"field.delim\": \" ,\",\n \"serialization.format\": \",\"\n }\n }\n }" + }, + "description": "create new table table" + }, + "response": [] + 
}, + { + "name": "delete table", + "request": { + "auth": { + "type": "basic", + "basic": { + "username": "admin", + "password": "admin", + "saveHelperData": true, + "showPassword": false + } + }, + "url": "http://c6401.ambari.apache.org:8080/api/v1/views/HIVE/versions/1.5.0/instances/AUTO_HIVE_INSTANCE/resources/ddl/databases/d1/tables/t9", + "method": "DELETE", + "header": [ + { + "key": "X-Requested-By", + "value": "ambari", + "description": "" + }, + { + "key": "Authorization", + "value": "Basic YWRtaW46YWRtaW4=", + "description": "" + } + ], + "body": {}, + "description": "delete d1.t2 table" + }, + "response": [] + } + ] +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/contrib/views/pom.xml ---------------------------------------------------------------------- diff --git a/contrib/views/pom.xml b/contrib/views/pom.xml index b6d4176..7f02a43 100644 --- a/contrib/views/pom.xml +++ b/contrib/views/pom.xml @@ -48,6 +48,7 @@ <module>storm</module> <module>hueambarimigration</module> <module>hive-next</module> + <module>hive20</module> <module>wfmanager</module> <!--ambari-views-package should be last in the module list for it to function properly--> <module>ambari-views-package</module> http://git-wip-us.apache.org/repos/asf/ambari/blob/853a1ce7/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index d6418ff..dcdbab3 100644 --- a/pom.xml +++ b/pom.xml @@ -320,6 +320,8 @@ <exclude>contrib/views/commons/src/main/resources/ui/*/tests/**/vendor/**</exclude> <exclude>contrib/views/hive-next/src/main/resources/ui/hive-web/vendor/codemirror/**</exclude> <exclude>contrib/views/hive-next/src/main/resources/ui/hive-web/.bowerrc</exclude> + <exclude>contrib/views/hive20/src/main/resources/ui/vendor/codemirror/**</exclude> + <exclude>contrib/views/hive20/src/main/resources/ui/.bowerrc</exclude> <exclude>contrib/views/files/src/main/resources/ui/.bowerrc</exclude> 
<exclude>contrib/views/files/src/main/resources/ui/bower_components/**</exclude> <exclude>contrib/views/files/src/main/resources/ui/node/**</exclude>
